databased.databased
import sqlite3
from typing import Any

import loggi
from griddle import griddy
from pathier import Pathier, Pathish


def dict_factory(cursor: sqlite3.Cursor, row: tuple) -> dict:
    fields = [column[0] for column in cursor.description]
    return {column: value for column, value in zip(fields, row)}


class Databased:
    """SQLite3 wrapper.

    Anytime `Databased.query()` is called, a connection to the database will be opened if it isn't already open.

    (All builtin class functions that access the database do so through the query method.)

    Connections, however, need to be closed manually.

    Manually closing the connection can be avoided by using `Databased` with a context manager, which will close the connection upon exiting:
    >>> with Databased() as db:
    >>>     rows = db.select("some_table")  # connection opened on first query
    >>> # connection closed upon exiting the context

    Data is returned as a list of dictionaries where each dictionary is `{"column": value}`.
    """

    def __init__(
        self,
        dbpath: Pathish = "db.sqlite3",
        connection_timeout: float = 10,
        detect_types: bool = True,
        enforce_foreign_keys: bool = True,
        commit_on_close: bool = True,
        log_dir: Pathish | None = None,
    ):
        """
        :params:
        * `dbpath`: The path to the database file. Will be created if it doesn't exist.
        * `connection_timeout`: How long (in seconds) to wait before raising an exception when trying to connect to the database.
        * `detect_types`: Whether columns with values that can be converted to Python objects should be,
        i.e. `TIMESTAMP` table data can be received and is converted to, upon retrieval, a `datetime.datetime` object.
        * `enforce_foreign_keys`: Whether to enforce foreign key constraints.
        * `commit_on_close`: Whether to automatically commit transactions when the connection is closed.
        * `log_dir`: The directory the transaction log should be saved in. If `None`, it'll be saved in the same directory as the database file.
        """
        self.path = dbpath
        self.connection_timeout = connection_timeout
        self.connection = None
        self._logger_init(log_dir)
        self.detect_types = detect_types
        self.commit_on_close = commit_on_close
        self.enforce_foreign_keys = enforce_foreign_keys

    def __enter__(self):
        self.connect()
        return self

    def __exit__(self, *args, **kwargs):
        self.close()

    @property
    def commit_on_close(self) -> bool:
        """Should commit database before closing connection when `self.close()` is called."""
        return self._commit_on_close

    @commit_on_close.setter
    def commit_on_close(self, should_commit_on_close: bool):
        self._commit_on_close = should_commit_on_close

    @property
    def connected(self) -> bool:
        """Whether this `Databased` instance is connected to the database file or not."""
        return self.connection is not None

    @property
    def connection_timeout(self) -> float:
        """Changes to this property won't take effect until the current connection, if open, is closed and a new connection opened."""
        return self._connection_timeout

    @connection_timeout.setter
    def connection_timeout(self, timeout: float):
        self._connection_timeout = timeout

    @property
    def detect_types(self) -> bool:
        """Should use `detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES` when establishing a database connection.

        Changes to this property won't take effect until the current connection, if open, is closed and a new connection opened.
        """
        return self._detect_types

    @detect_types.setter
    def detect_types(self, should_detect: bool):
        self._detect_types = should_detect

    @property
    def enforce_foreign_keys(self) -> bool:
        return self._enforce_foreign_keys

    @enforce_foreign_keys.setter
    def enforce_foreign_keys(self, should_enforce: bool):
        self._enforce_foreign_keys = should_enforce
        self._set_foreign_key_enforcement()

    @property
    def indicies(self) -> list[str]:
        """List of indices for this database."""
        return [
            table["name"]
            for table in self.query(
                "SELECT name FROM sqlite_Schema WHERE type = 'index';"
            )
        ]

    @property
    def name(self) -> str:
        """The name of this database."""
        return self.path.stem

    @property
    def path(self) -> Pathier:
        """The path to this database file."""
        return self._path

    @path.setter
    def path(self, new_path: Pathish):
        """If `new_path` doesn't exist, it will be created (including parent folders)."""
        self._path = Pathier(new_path)
        if not self.path.exists():
            self.path.touch()

    @property
    def tables(self) -> list[str]:
        """List of table names for this database."""
        return [
            table["name"]
            for table in self.query(
                "SELECT name FROM sqlite_Schema WHERE type = 'table' AND name NOT LIKE 'sqlite_%';"
            )
        ]

    @property
    def views(self) -> list[str]:
        """List of views for this database."""
        return [
            table["name"]
            for table in self.query(
                "SELECT name FROM sqlite_Schema WHERE type = 'view' AND name NOT LIKE 'sqlite_%';"
            )
        ]

    def _logger_init(self, log_path: Pathish | None = None):
        """:param: `log_path`: The directory the log file should be written to."""
        self.logger = loggi.getLogger(
            self.name, Pathier(log_path) if log_path else Pathier.cwd()
        )

    def _prepare_insert_queries(
        self, table: str, columns: tuple[str, ...], values: list[tuple[Any, ...]]
    ) -> list[tuple[str, tuple[Any, ...]]]:
        """Format a list of insert statements.

        The returned value is a list because `values` will be broken up into chunks.

        Each list element is a two tuple consisting of the parameterized query string and a tuple of values.
        """
        inserts = []
        max_row_count = 900
        column_list = "(" + ", ".join(columns) + ")"
        for i in range(0, len(values), max_row_count):
            chunk = values[i : i + max_row_count]
            placeholder = (
                "(" + "),(".join((", ".join(("?" for _ in row)) for row in chunk)) + ")"
            )
            flattened_values = tuple((value for row in chunk for value in row))
            inserts.append(
                (
                    f"INSERT INTO {table} {column_list} VALUES {placeholder};",
                    flattened_values,
                )
            )
        return inserts

    def _set_foreign_key_enforcement(self):
        if self.connection:
            self.connection.execute(
                f"pragma foreign_keys = {int(self.enforce_foreign_keys)};"
            )

    def add_column(self, table: str, column_def: str):
        """Add a column to `table`.

        `column_def` should be in the form `{column_name} {type_name} {constraint}`.

        e.g.
        >>> db = Databased()
        >>> db.add_column("rides", "num_stops INTEGER NOT NULL DEFAULT 0")"""
        self.query(f"ALTER TABLE {table} ADD {column_def};")

    def close(self):
        """Disconnect from the database.

        Does not call `commit()` for you unless the `commit_on_close` property is set to `True`.
        """
        if self.connection:
            if self.commit_on_close:
                self.commit()
            self.connection.close()
            self.connection = None

    def commit(self):
        """Commit state of database."""
        if self.connection:
            self.connection.commit()
            self.logger.info("Committed successfully.")
        else:
            raise RuntimeError(
                "Databased.commit(): Can't commit db with no open connection."
            )

    def connect(self):
        """Connect to the database."""
        self.connection = sqlite3.connect(
            self.path,
            detect_types=(
                sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES
                if self.detect_types
                else 0
            ),
            timeout=self.connection_timeout,
        )
        self._set_foreign_key_enforcement()
        self.connection.row_factory = dict_factory

    def count(
        self,
        table: str,
        column: str = "*",
        where: str | None = None,
        distinct: bool = False,
    ) -> int:
        """Return the number of matching rows in `table`.

        Equivalent to:
        >>> SELECT COUNT({distinct} {column}) FROM {table} {where};"""
        query = (
            f"SELECT COUNT( {('DISTINCT' if distinct else '')} {column}) FROM {table}"
        )
        if where:
            query += f" WHERE {where}"
        query += ";"
        return int(list(self.query(query)[0].values())[0])

    def create_table(self, table: str, *column_defs: str):
        """Create a table if it doesn't exist.

        #### :params:

        `table`: Name of the table to create.

        `column_defs`: Any number of column names and their definitions in proper SQLite3 syntax,
        e.g. `"column_name TEXT UNIQUE"` or `"column_name INTEGER PRIMARY KEY"` etc."""
        columns = ", ".join(column_defs)
        self.query(f"CREATE TABLE IF NOT EXISTS {table} ({columns});")
        self.logger.info(f"'{table}' table created.")

    def delete(self, table: str, where: str | None = None) -> int:
        """Delete rows from `table` that satisfy the given `where` clause.

        If `where` is `None`, all rows will be deleted.

        Returns the number of deleted rows.

        e.g.
        >>> db = Databased()
        >>> db.delete("rides", "distance < 5 AND average_speed < 7")"""
        try:
            if where:
                self.query(f"DELETE FROM {table} WHERE {where};")
            else:
                self.query(f"DELETE FROM {table};")
            row_count = self.cursor.rowcount
            self.logger.info(
                f"Deleted {row_count} rows from '{table}' where '{where}'."
            )
            return row_count
        except Exception as e:
            self.logger.exception(
                f"Error deleting rows from '{table}' where '{where}'."
            )
            raise e

    def describe(self, table: str) -> list[dict]:
        """Returns information about `table`."""
        return self.query(f"pragma table_info('{table}');")

    def drop_column(self, table: str, column: str):
        """Drop `column` from `table`."""
        self.query(f"ALTER TABLE {table} DROP {column};")

    def drop_table(self, table: str) -> bool:
        """Drop `table` from the database.

        Returns `True` if successful, `False` if not."""
        try:
            self.query(f"DROP TABLE {table};")
            self.logger.info(f"Dropped table '{table}'.")
            return True
        except Exception as e:
            print(f"{type(e).__name__}: {e}")
            self.logger.error(f"Failed to drop table '{table}'.")
            return False

    def execute_script(self, path: Pathish, encoding: str = "utf-8") -> list[dict]:
        """Execute the SQL script located at `path`."""
        if not self.connected:
            self.connect()
        assert self.connection
        return self.connection.executescript(
            Pathier(path).read_text(encoding)
        ).fetchall()

    def get_columns(self, table: str) -> tuple[str, ...]:
        """Returns the column names in `table`."""
        return tuple(
            (column["name"] for column in self.query(f"pragma table_info('{table}');"))
        )

    def insert(
        self, table: str, columns: tuple[str, ...], values: list[tuple[Any, ...]]
    ) -> int:
        """Insert rows of `values` into `columns` of `table`.

        Each `tuple` in `values` corresponds to an individual row that is to be inserted.
        """
        row_count = 0
        for insert in self._prepare_insert_queries(table, columns, values):
            try:
                self.query(insert[0], insert[1])
                row_count += self.cursor.rowcount
                self.logger.info(f"Inserted {row_count} rows into '{table}' table.")
            except Exception as e:
                self.logger.exception(f"Error inserting rows into '{table}' table.")
                raise e
        return row_count

    def query(self, query_: str, parameters: tuple[Any, ...] = tuple()) -> list[dict]:
        """Execute an SQL query and return the results.

        Ensures that the database connection is opened before executing the command.

        The cursor used to execute the query will be available through `self.cursor` until the next time `self.query()` is called.
        """
        if not self.connected:
            self.connect()
        assert self.connection
        self.cursor = self.connection.cursor()
        self.cursor.execute(query_, parameters)
        return self.cursor.fetchall()

    def rename_column(self, table: str, column_to_rename: str, new_column_name: str):
        """Rename a column in `table`."""
        self.query(
            f"ALTER TABLE {table} RENAME {column_to_rename} TO {new_column_name};"
        )

    def rename_table(self, table_to_rename: str, new_table_name: str):
        """Rename a table."""
        self.query(f"ALTER TABLE {table_to_rename} RENAME TO {new_table_name};")

    def select(
        self,
        table: str,
        columns: list[str] = ["*"],
        joins: list[str] | None = None,
        where: str | None = None,
        group_by: str | None = None,
        having: str | None = None,
        order_by: str | None = None,
        limit: int | str | None = None,
        exclude_columns: list[str] | None = None,
    ) -> list[dict]:
        """Return rows for given criteria.

        If `exclude_columns` is given, `columns` will be ignored and data will be returned with all columns except the ones specified by `exclude_columns`.

        For complex queries, use the `databased.query()` method.

        Parameters `where`, `group_by`, `having`, `order_by`, and `limit` should not include
        their corresponding keyword in their string, but should otherwise be valid SQL.

        `joins` strings should include their keyword (`INNER JOIN`, `LEFT JOIN`) in addition to the rest of the sub-statement.

        >>> Databased().select(
                "bike_rides",
                ["id", "date", "distance", "moving_time", "AVG(distance/moving_time) as average_speed"],
                where="distance > 20",
                order_by="distance DESC",
                limit=10
            )
        executes the query:
        >>> SELECT
                id, date, distance, moving_time, AVG(distance/moving_time) as average_speed
            FROM
                bike_rides
            WHERE
                distance > 20
            ORDER BY
                distance DESC
            LIMIT 10;"""
        # Assume implicit `[*]` for `columns` param when `exclude_columns` is used.
        if exclude_columns:
            columns = [
                column
                for column in self.get_columns(table)
                if column not in exclude_columns
            ]
        query = f"SELECT {', '.join(columns)} FROM {table}"
        if joins:
            query += f" {' '.join(joins)}"
        if where:
            query += f" WHERE {where}"
        if group_by:
            query += f" GROUP BY {group_by}"
        if having:
            query += f" HAVING {having}"
        if order_by:
            query += f" ORDER BY {order_by}"
        if limit:
            query += f" LIMIT {limit}"
        query += ";"
        rows = self.query(query)
        return rows

    @staticmethod
    def to_grid(data: list[dict], shrink_to_terminal: bool = True) -> str:
        """Returns a tabular grid from `data`.

        If `shrink_to_terminal` is `True`, the column widths of the grid will be reduced to fit within the current terminal.
        """
        return griddy(data, "keys", shrink_to_terminal)

    def update(
        self, table: str, column: str, value: Any, where: str | None = None
    ) -> int:
        """Update `column` of `table` to `value` for rows satisfying the conditions in `where`.

        If `where` is `None`, all rows will be updated.

        Returns the number of updated rows.

        e.g.
        >>> db = Databased()
        >>> db.update("rides", "elevation", 100, "elevation < 100")"""
        try:
            if where:
                self.query(f"UPDATE {table} SET {column} = ? WHERE {where};", (value,))
            else:
                self.query(f"UPDATE {table} SET {column} = ?;", (value,))
            row_count = self.cursor.rowcount
            self.logger.info(
                f"Updated {row_count} rows in '{table}' table to '{column}' = '{value}' where '{where}'."
            )
            return row_count
        except Exception as e:
            self.logger.exception(
                f"Failed to update rows in '{table}' table to '{column}' = '{value}' where '{where}'."
            )
            raise e

    def vacuum(self) -> int:
        """Reduce the disk size of the database after row/table deletion.

        Returns space freed up in bytes."""
        size = self.path.size
        self.query("VACUUM;")
        return size - self.path.size

    # ============================== Database Dump ==============================

    def _format_column_def(self, description: dict) -> str:
        name = description["name"]
        type_ = description["type"]
        primary_key = bool(description["pk"])
        not_null = bool(description["notnull"])
        default = description["dflt_value"]
        column = f"{name} {type_}"
        if primary_key:
            column += " PRIMARY KEY"
        if not_null:
            column += " NOT NULL"
        if default:
            if isinstance(default, str):
                default = f"{default}"
            column += f" DEFAULT {default}"
        return column

    def _format_table_data(self, table: str) -> str:
        columns = self.get_columns(table)
        rows = [tuple(row.values()) for row in self.select(table)]
        inserts = self._prepare_insert_queries(table, columns, rows)
        insert_strings = []
        indent = " " * 4
        for insert in inserts:
            text = insert[0]
            sub = "^$data$based$^"
            text = text.replace("?", sub)
            for value in insert[1]:
                if not value:
                    value = ""
                if isinstance(value, bool):
                    value = int(value)
                if not isinstance(value, int) and (not isinstance(value, float)):
                    if isinstance(value, str):
                        value = value.replace('"', "'")
                    value = f'"{value}"'
                text = text.replace(sub, str(value), 1)
            for pair in [
                ("INSERT INTO ", f"INSERT INTO\n{indent}"),
                (") VALUES (", f")\nVALUES\n{indent}("),
                ("),", f"),\n{indent}"),
            ]:
                text = text.replace(pair[0], pair[1])
            insert_strings.append(text)
        return "\n".join(insert_strings)

    def _format_table_def(self, table: str) -> str:
        description = self.describe(table)
        indent = " " * 4
        columns = ",\n".join(
            (f"{indent * 2}{self._format_column_def(column)}" for column in description)
        )
        table_def = (
            "CREATE TABLE IF NOT EXISTS\n"
            + f"{indent}{table} (\n"
            + columns
            + f"\n{indent});"
        )
        return table_def

    def _get_data_dump_string(self, tables: list[str]) -> str:
        return "\n\n".join((self._format_table_data(table) for table in tables))

    def _get_schema_dump_string(self, tables: list[str]) -> str:
        return "\n\n".join((self._format_table_def(table) for table in tables))

    def dump_data(self, path: Pathish, tables: list[str] | None = None):
        """Create a data dump file for the specified tables, or for all tables if none are given."""
        tables = tables or self.tables
        path = Pathier(path)
        path.write_text(self._get_data_dump_string(tables), encoding="utf-8")

    def dump_schema(self, path: Pathish, tables: list[str] | None = None):
        """Create a schema dump file for the specified tables, or for all tables if none are given.

        NOTE: Foreign key relationships/constraints are not preserved when dumping the schema.
        """
        tables = tables or self.tables
        path = Pathier(path)
        path.write_text(self._get_schema_dump_string(tables), encoding="utf-8")
class Databased

SQLite3 wrapper.

Anytime `Databased.query()` is called, a connection to the database will be opened if it isn't already open.
(All builtin class functions that access the database do so through the query method.)

Connections, however, need to be closed manually.

Manually closing the connection can be avoided by using `Databased` with a context manager, which will close the connection upon exiting:

>>> with Databased() as db:
>>>     rows = db.select("some_table")  # connection opened on first query
>>> # connection closed upon exiting the context

Data is returned as a list of dictionaries where each dictionary is `{"column": value}`.
def __init__(self, dbpath: Pathish = "db.sqlite3", connection_timeout: float = 10, detect_types: bool = True, enforce_foreign_keys: bool = True, commit_on_close: bool = True, log_dir: Pathish | None = None)
:params:
* `dbpath`: The path to the database file. Will be created if it doesn't exist.
* `connection_timeout`: How long (in seconds) to wait before raising an exception when trying to connect to the database.
* `detect_types`: Whether column values that can be converted to Python objects should be, e.g. `TIMESTAMP` table data is received and converted to a `datetime.datetime` object upon retrieval.
* `enforce_foreign_keys`: Whether to enforce foreign key constraints.
* `commit_on_close`: Whether to automatically commit transactions when the connection is closed.
* `log_dir`: The directory the transaction log should be saved in. If `None`, it'll be saved in the same directory as the database file.
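For illustration, a minimal construction sketch; the file path and option values here are hypothetical:

from databased.databased import Databased

# Creates ./data/inventory.sqlite3 (and parent folders) if it doesn't already exist.
db = Databased(
    "data/inventory.sqlite3",
    connection_timeout=30,  # wait up to 30 seconds on a locked database
    detect_types=True,      # e.g. TIMESTAMP columns come back as datetime objects
    commit_on_close=True,   # commit automatically when close() is called
)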
`path` (setter): If `new_path` doesn't exist, it will be created (including parent folders).

`connection_timeout`: Changes to this property won't take effect until the current connection, if open, is closed and a new connection opened.

`detect_types`: Whether to use `detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES` when establishing a database connection.
Changes to this property won't take effect until the current connection, if open, is closed and a new connection opened.
def add_column(self, table: str, column_def: str)
Add a column to `table`.

`column_def` should be in the form `{column_name} {type_name} {constraint}`, e.g.:

>>> db = Databased()
>>> db.add_column("rides", "num_stops INTEGER NOT NULL DEFAULT 0")
def close(self)
Disconnect from the database.

Does not call `commit()` for you unless the `commit_on_close` property is set to `True`.
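A sketch of the manual connection lifecycle (the database path is hypothetical); with `commit_on_close=False`, `commit()` must be called explicitly before `close()`:

from databased.databased import Databased

db = Databased("db.sqlite3", commit_on_close=False)
db.query("CREATE TABLE IF NOT EXISTS rides (id INTEGER PRIMARY KEY, distance REAL);")
db.commit()  # not automatic here, since commit_on_close is False
db.close()   # outside a context manager the connection must be closed manually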
def commit(self)
Commit state of database.
def connect(self)
Connect to the database.
def count(self, table: str, column: str = "*", where: str | None = None, distinct: bool = False) -> int
Return the number of matching rows in `table`.

Equivalent to:

>>> SELECT COUNT({distinct} {column}) FROM {table} {where};
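A usage sketch, assuming a hypothetical `rides` table with `city` and `distance` columns:

from databased.databased import Databased

with Databased("rides.sqlite3") as db:
    total = db.count("rides")                                 # all rows
    long_rides = db.count("rides", where="distance > 20")     # rows matching the WHERE clause
    cities = db.count("rides", column="city", distinct=True)  # distinct city values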
def create_table(self, table: str, *column_defs: str)
Create a table if it doesn't exist.

:params:

`table`: Name of the table to create.

`column_defs`: Any number of column names and their definitions in proper SQLite3 syntax,
e.g. `"column_name TEXT UNIQUE"` or `"column_name INTEGER PRIMARY KEY"` etc.
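For example (the table and column definitions below are hypothetical):

from databased.databased import Databased

with Databased("rides.sqlite3") as db:
    db.create_table(
        "rides",
        "id INTEGER PRIMARY KEY",
        "date TIMESTAMP",
        "distance REAL NOT NULL",
        "moving_time REAL NOT NULL",
        "city TEXT",
    )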
def delete(self, table: str, where: str | None = None) -> int
Delete rows from `table` that satisfy the given `where` clause.

If `where` is `None`, all rows will be deleted.

Returns the number of deleted rows.

e.g.
>>> db = Databased()
>>> db.delete("rides", "distance < 5 AND average_speed < 7")
def describe(self, table: str) -> list[dict]
Returns information about `table`.
def drop_column(self, table: str, column: str)
Drop `column` from `table`.
def drop_table(self, table: str) -> bool
Drop `table` from the database.

Returns `True` if successful, `False` if not.
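A brief sketch using the boolean return value (the table name is hypothetical):

from databased.databased import Databased

with Databased("rides.sqlite3") as db:
    if not db.drop_table("old_rides"):
        print("Could not drop 'old_rides'; see the log for details.")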
def execute_script(self, path: Pathish, encoding: str = "utf-8") -> list[dict]
Execute the SQL script located at `path`.
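A sketch, assuming a hypothetical `schema.sql` file sitting next to the database:

from databased.databased import Databased

with Databased("rides.sqlite3") as db:
    # Runs every statement in the file through sqlite3's executescript().
    db.execute_script("schema.sql")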
def get_columns(self, table: str) -> tuple[str, ...]
Returns the column names in `table`.
def insert(self, table: str, columns: tuple[str, ...], values: list[tuple[Any, ...]]) -> int
Insert rows of `values` into `columns` of `table`.

Each `tuple` in `values` corresponds to an individual row that is to be inserted.

Returns the number of inserted rows.
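A usage sketch with a hypothetical `rides` table; each tuple in `values` becomes one row:

from databased.databased import Databased

with Databased("rides.sqlite3") as db:
    inserted = db.insert(
        "rides",
        ("date", "distance", "moving_time", "city"),
        [
            ("2023-05-01 08:15:00", 25.3, 1.6, "Chicago"),
            ("2023-05-02 17:40:00", 12.1, 0.8, "Chicago"),
        ],
    )
    print(f"Inserted {inserted} rows.")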
def query(self, query_: str, parameters: tuple[Any, ...] = tuple()) -> list[dict]
Execute an SQL query and return the results.

Ensures that the database connection is opened before executing the command.

The cursor used to execute the query will be available through `self.cursor` until the next time `self.query()` is called.
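A sketch of a parameterized query against a hypothetical `rides` table; placeholders are passed straight through to `sqlite3`:

from databased.databased import Databased

with Databased("rides.sqlite3") as db:
    rows = db.query(
        "SELECT city, COUNT(*) AS rides FROM rides WHERE distance > ? GROUP BY city;",
        (20,),
    )
    # Each row is a dictionary, e.g. {"city": "Chicago", "rides": 3}
    print(rows)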
def rename_column(self, table: str, column_to_rename: str, new_column_name: str)
Rename a column in `table`.
def rename_table(self, table_to_rename: str, new_table_name: str)
Rename a table.
def select(self, table: str, columns: list[str] = ["*"], joins: list[str] | None = None, where: str | None = None, group_by: str | None = None, having: str | None = None, order_by: str | None = None, limit: int | str | None = None, exclude_columns: list[str] | None = None) -> list[dict]
Return rows for given criteria.

If `exclude_columns` is given, `columns` will be ignored and data will be returned with all columns except the ones specified by `exclude_columns`.

For complex queries, use the `databased.query()` method.

Parameters `where`, `group_by`, `having`, `order_by`, and `limit` should not include their corresponding keyword in their string, but should otherwise be valid SQL.

`joins` strings should include their keyword (`INNER JOIN`, `LEFT JOIN`) in addition to the rest of the sub-statement.

>>> Databased().select(
        "bike_rides",
        ["id", "date", "distance", "moving_time", "AVG(distance/moving_time) as average_speed"],
        where="distance > 20",
        order_by="distance DESC",
        limit=10
    )

executes the query:

>>> SELECT
        id, date, distance, moving_time, AVG(distance/moving_time) as average_speed
    FROM
        bike_rides
    WHERE
        distance > 20
    ORDER BY
        distance DESC
    LIMIT 10;
@staticmethod
def to_grid(data: list[dict], shrink_to_terminal: bool = True) -> str
Returns a tabular grid from `data`.

If `shrink_to_terminal` is `True`, the column widths of the grid will be reduced to fit within the current terminal.
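A sketch combining `select()` and `to_grid()` (the table is hypothetical):

from databased.databased import Databased

with Databased("rides.sqlite3") as db:
    rows = db.select("rides", ["city", "distance"], order_by="distance DESC", limit=5)
# to_grid() is a static method; it returns the rows as a text grid.
print(Databased.to_grid(rows))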
def update(self, table: str, column: str, value: Any, where: str | None = None) -> int
Update `column` of `table` to `value` for rows satisfying the conditions in `where`.

If `where` is `None`, all rows will be updated.

Returns the number of updated rows.

e.g.
>>> db = Databased()
>>> db.update("rides", "elevation", 100, "elevation < 100")
def vacuum(self) -> int
Reduce the disk size of the database after row/table deletion.

Returns the space freed up in bytes.
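For example, after deleting rows (sizes will vary); note the explicit `commit()` so the `VACUUM` doesn't run inside an open transaction:

from databased.databased import Databased

with Databased("rides.sqlite3") as db:
    db.delete("rides", "distance < 1")
    db.commit()
    freed = db.vacuum()
    print(f"Freed {freed} bytes.")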
def dump_data(self, path: Pathish, tables: list[str] | None = None)
Create a data dump file for the specified tables, or for all tables if none are given.
def dump_schema(self, path: Pathish, tables: list[str] | None = None)
Create a schema dump file for the specified tables, or for all tables if none are given.

NOTE: Foreign key relationships/constraints are not preserved when dumping the schema.
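A sketch writing both dump files (the output paths are hypothetical):

from databased.databased import Databased

with Databased("rides.sqlite3") as db:
    db.dump_schema("rides_schema.sql")  # CREATE TABLE statements (no foreign keys)
    db.dump_data("rides_data.sql")      # INSERT statements for every table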