Python psycopg2.sql.Identifier() Examples
The following are 30 code examples of psycopg2.sql.Identifier(), collected from open-source projects. The source file, project, and license are noted above each example; follow those references to see the code in its original context. You may also want to check out the other functions and classes available in the psycopg2.sql module.
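Before the project examples, here is a minimal sketch of the basic pattern: sql.Identifier() safely quotes a dynamic table or column name so it can be composed into a statement with sql.SQL(...).format(), while ordinary values are still passed as query parameters. The connection string, table, and column names below (dbname=example, user_data, email) are made up for illustration.

import psycopg2
from psycopg2 import sql

# Hypothetical connection string, table, and column -- adjust for your own database.
conn = psycopg2.connect("dbname=example")
table_name = "user_data"
column_name = "email"

# Identifier() quotes the dynamic names; the %s value is still bound as a parameter.
query = sql.SQL("SELECT {col} FROM {tbl} WHERE {col} IS NOT NULL LIMIT %s").format(
    col=sql.Identifier(column_name),
    tbl=sql.Identifier(table_name),
)

with conn.cursor() as cur:
    cur.execute(query, [10])
    rows = cur.fetchall()
conn.close()

The examples below use the same pattern for tables, columns, schemas, and even database names, usually combined with sql.Literal or ordinary parameter binding for values.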
Example #1
Source File: retention.py From zulip with Apache License 2.0 | 7 votes |
def restore_models_with_message_key_from_archive(archive_transaction_id: int) -> None:
    for model in models_with_message_key:
        query = SQL("""
            INSERT INTO {table_name} ({dst_fields})
            SELECT {src_fields}
            FROM {archive_table_name}
            INNER JOIN zerver_archivedmessage ON {archive_table_name}.message_id = zerver_archivedmessage.id
            WHERE zerver_archivedmessage.archive_transaction_id = {archive_transaction_id}
            ON CONFLICT (id) DO NOTHING
        """)

        move_rows(
            model['class'],
            query,
            src_db_table=model['archive_table_name'],
            table_name=Identifier(model['table_name']),
            archive_transaction_id=Literal(archive_transaction_id),
            archive_table_name=Identifier(model['archive_table_name']),
        )
Example #2
Source File: utils.py From pbt with MIT License | 6 votes |
def get_col_from_populations(connect_str_or_path, use_sqlite, population_id, column_name):
    if use_sqlite:
        sqlite_path = connect_str_or_path
        conn = sqlite3.connect(sqlite_path)
        command = "SELECT {} FROM populations WHERE population_id = ?"
        command = command.format(column_name)  # Warning: SQL injection
    else:
        db_connect_str = connect_str_or_path
        conn = psycopg2.connect(db_connect_str)
        command = "SELECT {} FROM populations WHERE population_id = %s"
        command = SQL(command).format(Identifier(column_name))
    cur = conn.cursor()
    cur.execute(command, [population_id])
    column = cur.fetchall()
    cur.close()
    conn.close()
    column = [value[0] for value in column]
    return column
Example #3
Source File: row_queries.py From ontask_b with MIT License | 6 votes |
def delete_row(table_name: str, kv_pair: Tuple[str, Any]):
    """Delete the row with the given key, value pair.

    :param table_name: Table to manipulate
    :param kv_pair: A key=value pair to identify the row. Key is supposed to be unique.
    :return: Drops that row from the table in the DB
    """
    # Get the key/value subclause
    bool_clause, query_fields = get_boolean_clause(
        filter_pairs={kv_pair[0]: kv_pair[1]},
    )

    # Create the query
    query = sql.SQL('DELETE FROM {0}').format(
        sql.Identifier(table_name),
    ) + sql.SQL(' WHERE ') + bool_clause

    # Execute the query
    with connection.connection.cursor() as cursor:
        cursor.execute(query, query_fields)
Example #4
Source File: row_queries.py From ontask_b with MIT License | 6 votes |
def get_num_rows(table_name, cond_filter=None):
    """Get the number of rows in the table that satisfy the condition.

    :param table_name: Table name
    :param cond_filter: Formula
    :return: integer
    """
    query = sql.SQL('SELECT count (*) FROM {0}').format(
        sql.Identifier(table_name))
    cond_fields = []
    if cond_filter is not None:
        cond_filter, cond_fields = formula.evaluate(
            cond_filter,
            formula.EVAL_SQL,
        )
        query = sql.SQL('{0} WHERE {1}').format(query, cond_filter)

    with connection.connection.cursor() as cursor:
        cursor.execute(query, cond_fields)
        num_rows = cursor.fetchone()[0]

    return num_rows
Example #5
Source File: psycopg2_tests.py From apm-agent-python with BSD 3-Clause "New" or "Revised" License | 6 votes |
def test_psycopg2_composable_query_works(instrument, postgres_connection, elasticapm_client):
    """
    Check that we parse queries that are psycopg2.sql.Composable correctly
    """
    cursor = postgres_connection.cursor()
    query = sql.SQL("SELECT * FROM {table} WHERE {row} LIKE 't%' ORDER BY {row} DESC").format(
        table=sql.Identifier("test"), row=sql.Identifier("name")
    )
    baked_query = query.as_string(cursor.__wrapped__)
    result = None
    try:
        elasticapm_client.begin_transaction("web.django")
        cursor.execute(query)
        result = cursor.fetchall()
        elasticapm_client.end_transaction(None, "test-transaction")
    finally:
        # make sure we've cleared out the spans for the other tests.
        assert [(2, "two"), (3, "three")] == result
        transactions = elasticapm_client.events[TRANSACTION]
        spans = elasticapm_client.spans_for_transaction(transactions[0])
        span = spans[0]
        assert span["name"] == "SELECT FROM test"
        assert "db" in span["context"]
        assert span["context"]["db"]["type"] == "sql"
        assert span["context"]["db"]["statement"] == baked_query
Example #6
Source File: indexer.py From cccatalog-api with MIT License | 6 votes |
def get_last_item_ids(table):
    """
    Find the last item added to Postgres and return both its sequential ID
    and its UUID.
    :param table: The name of the database table to check.
    :return: A tuple containing a sequential ID and a UUID
    """
    pg_conn = database_connect()
    pg_conn.set_session(readonly=True)
    cur = pg_conn.cursor()
    # Find the last row added to the database table
    cur.execute(
        SQL(
            'SELECT id, identifier FROM {} ORDER BY id DESC LIMIT 1;'
        ).format(Identifier(table))
    )
    last_added_pg_id, last_added_uuid = cur.fetchone()
    cur.close()
    pg_conn.close()
    return last_added_pg_id, last_added_uuid
Example #7
Source File: retention.py From zulip with Apache License 2.0 | 6 votes |
def move_models_with_message_key_to_archive(msg_ids: List[int]) -> None:
    assert len(msg_ids) > 0

    for model in models_with_message_key:
        query = SQL("""
            INSERT INTO {archive_table_name} ({dst_fields})
            SELECT {src_fields}
            FROM {table_name}
            WHERE {table_name}.message_id IN {message_ids}
            ON CONFLICT (id) DO NOTHING
        """)
        move_rows(
            model['class'],
            query,
            table_name=Identifier(model['table_name']),
            archive_table_name=Identifier(model['archive_table_name']),
            message_ids=Literal(tuple(msg_ids)),
        )

# Attachments can't use the common models_with_message_key system,
# because they can be referenced by more than one Message, and we only
# want to delete the Attachment if we're deleting the last message
# referencing them.
Example #8
Source File: postgres.py From target-postgres with GNU Affero General Public License v3.0 | 6 votes |
def add_index(self, cur, table_name, column_names):
    index_name = 'tp_{}_{}_idx'.format(table_name, "_".join(column_names))
    if len(index_name) > self.IDENTIFIER_FIELD_LENGTH:
        index_name_hash = hashlib.sha1(index_name.encode('utf-8')).hexdigest()[0:60]
        index_name = 'tp_{}'.format(index_name_hash)
    cur.execute(sql.SQL('''
        CREATE INDEX {index_name} ON {table_schema}.{table_name}
            ({column_names});
        ''').format(
        index_name=sql.Identifier(index_name),
        table_schema=sql.Identifier(self.postgres_schema),
        table_name=sql.Identifier(table_name),
        column_names=sql.SQL(', ').join(sql.Identifier(column_name) for column_name in column_names)))
Example #9
Source File: test_wallets.py From bitcoinlib with GNU General Public License v3.0 | 6 votes |
def create_db_if_needed(cls, db):
    if cls.SCHEMA == 'postgresql':
        con = psycopg2.connect(user='postgres', host='localhost', password='postgres')
        con.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
        cur = con.cursor()
        try:
            cur.execute(sql.SQL("CREATE DATABASE {}").format(
                sql.Identifier(db))
            )
        except Exception:
            pass
        finally:
            cur.close()
            con.close()
    elif cls.SCHEMA == 'mysql':
        con = mysql.connector.connect(user='root', host='localhost')
        cur = con.cursor()
        cur.execute('CREATE DATABASE IF NOT EXISTS {}'.format(db))
        con.commit()
        cur.close()
        con.close()
Example #10
Source File: utils.py From pbt with MIT License | 6 votes |
def get_max_of_db_column(connect_str_or_path, use_sqlite, table_name, column_name):
    if use_sqlite:
        sqlite_path = connect_str_or_path
        conn = sqlite3.connect(sqlite_path)
        cur = conn.cursor()
        parameters = [column_name, table_name]
        cur.execute("SELECT MAX({}) FROM {}".format(*parameters))
    else:
        db_connect_str = connect_str_or_path
        conn = psycopg2.connect(db_connect_str)
        cur = conn.cursor()
        parameters = [Identifier(column_name), Identifier(table_name)]
        cur.execute(SQL("SELECT MAX({}) FROM {}").format(*parameters))
    max_value = cur.fetchone()[0]
    cur.close()
    conn.close()
    return max_value
Example #11
Source File: migrate-mysql-to-postgresql.py From pajbot with MIT License | 6 votes |
def copy_table(destination_table_name, columns, coercions={}):
    source_table_name = "tb_" + destination_table_name
    print(f"Transfer table contents: {source_table_name}: Querying MySQL... ", end="")
    mysql.execute(f"SELECT {','.join(columns)} FROM {source_table_name}")
    rows = mysql.fetchall()

    print("Applying coercions in-memory... ", end="")
    for column_name, coercion in coercions.items():
        row_id = columns.index(column_name)
        rows = coercion(rows, row_id)
    rows = list(rows)

    print("Inserting into PostgreSQL... ", end="")
    # psql.execute(sql.SQL("DELETE FROM {}").format(sql.Identifier(destination_table_name)))
    psql_sql = sql.SQL("INSERT INTO {} VALUES %s").format(sql.Identifier(destination_table_name))
    execute_values(psql, psql_sql, rows)
    print("done.")
Example #12
Source File: insert_aws_org_tree.py From koku with GNU Affero General Public License v3.0 | 6 votes |
def _set_deleted_timestamp(self, date):
    """Updates the delete timestamp for values left in the yesterday lists."""
    if self.yesterday_accounts != []:
        for account_id in self.yesterday_accounts:
            alias_query = """SELECT id FROM {schema}.reporting_awsaccountalias
                             WHERE account_id = %s;"""
            alias_query = sql.SQL(alias_query).format(schema=sql.Identifier(self.schema))
            self.cursor.execute(alias_query, [account_id])
            alias_id = self.cursor.fetchone()[0]
            update_delete_sql = """UPDATE {schema}.reporting_awsorganizationalunit
                                   SET deleted_timestamp = %s
                                   WHERE account_alias_id = %s"""
            update_delete_sql = sql.SQL(update_delete_sql).format(schema=sql.Identifier(self.schema))
            self.cursor.execute(update_delete_sql, [date, alias_id])
    if self.yesterday_orgs != []:
        for org_unit in self.yesterday_orgs:
            update_delete_sql = """UPDATE {schema}.reporting_awsorganizationalunit
                                   SET deleted_timestamp = %s
                                   WHERE org_unit_id = %s"""
            update_delete_sql = sql.SQL(update_delete_sql).format(schema=sql.Identifier(self.schema))
            self.cursor.execute(update_delete_sql, [date, org_unit])
    self.yesterday_accounts = self.today_accounts
    self.yesterday_orgs = self.today_orgs
    self.today_accounts = []
    self.today_orgs = []
Example #13
Source File: insert_aws_org_tree.py From koku with GNU Affero General Public License v3.0 | 6 votes |
def _insert_org_sql(self, org_node, date):
    """Inserts the org unit information into the database."""
    select_sql = """SELECT * FROM {schema}.reporting_awsorganizationalunit
                    WHERE org_unit_name = %s and org_unit_id = %s
                          and org_unit_path = %s and level = %s"""
    select_sql = sql.SQL(select_sql).format(schema=sql.Identifier(self.schema))
    values = [org_node["org_unit_name"], org_node["org_unit_id"], org_node["org_path"], org_node["level"]]
    self.cursor.execute(select_sql, values)
    org_exists = self.cursor.fetchone()
    if org_exists is None:
        org_insert_sql = """INSERT INTO {schema}.reporting_awsorganizationalunit
                            (org_unit_name, org_unit_id, org_unit_path, created_timestamp, level)
                            VALUES (%s, %s, %s, %s, %s);"""
        org_insert_sql = sql.SQL(org_insert_sql).format(schema=sql.Identifier(self.schema))
        values = [
            org_node["org_unit_name"],
            org_node["org_unit_id"],
            org_node["org_path"],
            date,
            org_node["level"],
        ]
        self.cursor.execute(org_insert_sql, values)
Example #14
Source File: column_queries.py From ontask_b with MIT License | 6 votes |
def get_text_column_hash(table_name: str, column_name: str) -> str:
    """Calculate and return the MD5 hash of a text column.

    :param table_name: table to use
    :param column_name: column to pull the values
    :return: MD5 hash of the concatenation of the column values
    """
    query = sql.SQL('SELECT MD5(STRING_AGG({0}, {1})) FROM {2}').format(
        OnTaskDBIdentifier(column_name),
        sql.Literal(''),
        sql.Identifier(table_name),
    )

    with connection.connection.cursor() as cursor:
        cursor.execute(query)
        return cursor.fetchone()[0]
Example #15
Source File: column_queries.py From ontask_b with MIT License | 6 votes |
def db_rename_column(table: str, old_name: str, new_name: str):
    """Rename a column in the database.

    :param table: table
    :param old_name: Old name of the column
    :param new_name: New name of the column
    :return: Nothing. Change reflected in the database table
    """
    if len(new_name) > COLUMN_NAME_SIZE:
        raise Exception(
            _('Column name is longer than {0} characters').format(
                COLUMN_NAME_SIZE))

    with connection.connection.cursor() as cursor:
        cursor.execute(sql.SQL('ALTER TABLE {0} RENAME {1} TO {2}').format(
            sql.Identifier(table),
            sql.Identifier(old_name),
            sql.Identifier(new_name),
        ))
Example #16
Source File: column_queries.py From ontask_b with MIT License | 6 votes |
def is_column_unique(table_name: str, column_name: str) -> bool:
    """Return if a table column has all non-empty unique values.

    :param table_name: table
    :param column_name: column
    :return: Boolean (is unique)
    """
    query = sql.SQL('SELECT COUNT(DISTINCT {0}) = count(*) from {1}').format(
        OnTaskDBIdentifier(column_name),
        sql.Identifier(table_name),
    )

    # Get the result
    with connection.connection.cursor() as cursor:
        cursor.execute(query, [])
        return cursor.fetchone()[0]
Example #17
Source File: database_manager.py From DarkDarkGo with MIT License | 6 votes |
def get_relation_for_chunk_id(self, relation_name, chunk_id):
    """
    Return all results in a given relation for a specific chunk id
    Example:
    - get_relation_for_chunk_id(relation_name='crawler', 101) returns crawler's host for chunk id 101
    - get_relation_for_chunk_id(relation_name='index_builder', 101) returns index builder's host for chunk id 101
    :param chunk_id: Chunk ID
    :return: List of dictionary
    """
    try:
        conn = psycopg2.connect("dbname='{0}'".format(DATABASE))
        cur = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
        cur.execute(sql.SQL("SELECT * FROM {} WHERE chunk_id = %s").format(sql.Identifier(relation_name)),
                    [chunk_id])
        results = cur.fetchall()
        cur.close()
        return results
    except Exception as e:
        print(e)
Example #18
Source File: column_queries.py From ontask_b with MIT License | 6 votes |
def copy_column_in_db(
    table_name: str,
    col_from: str,
    col_to: str,
):
    """Copy the values in one column to another.

    :param table_name: Table to process
    :param col_from: Source column
    :param col_to: Destination column
    :return: Nothing. The change is performed in the DB
    """
    query = sql.SQL('UPDATE {0} SET {1}={2}').format(
        sql.Identifier(table_name),
        sql.Identifier(col_to),
        sql.Identifier(col_from),
    )

    connection.connection.cursor().execute(query)
Example #19
Source File: database_manager.py From DarkDarkGo with MIT License | 5 votes |
def clear_relation(self, relation_name):
    """
    Clear all records for a given relation
    :param relation_name: Relation name
    :return: None
    """
    try:
        conn = psycopg2.connect("dbname='{0}'".format(DATABASE))
        cur = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
        cur.execute(sql.SQL("DELETE FROM {}").format(sql.Identifier(relation_name)))
        cur.execute("ALTER SEQUENCE {0}_index_seq RESTART WITH 1;".format(relation_name))
        conn.commit()
        cur.close()
    except Exception as e:
        print(e)
Example #20
Source File: database_manager.py From DarkDarkGo with MIT License | 5 votes |
def get_relation_length(self, relation_name):
    """
    Return length of a given relation
    :param relation_name: Relation name
    :return: Int
    """
    try:
        conn = psycopg2.connect("dbname='{0}'".format(DATABASE))
        cur = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
        cur.execute(sql.SQL("SELECT COUNT(*) FROM {}").format(sql.Identifier(relation_name)))
        result = cur.fetchall()
        cur.close()
        return result[0]['count']
    except Exception as e:
        print(e)
Example #21
Source File: test_tools.py From bitcoinlib with GNU General Public License v3.0 | 5 votes |
def init_postgresql(_):
    con = psycopg2.connect(user='postgres', host='localhost', password='postgres')
    con.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
    cur = con.cursor()
    cur.execute(sql.SQL("DROP DATABASE IF EXISTS {}").format(
        sql.Identifier(DATABASE_NAME))
    )
    cur.execute(sql.SQL("CREATE DATABASE {}").format(
        sql.Identifier(DATABASE_NAME))
    )
    cur.close()
    con.close()
Example #22
Source File: database_manager.py From DarkDarkGo with MIT License | 5 votes |
def get_relation(self, relation_name):
    """
    Get all records for a given relation
    :param relation_name: Relation name
    :return: List of dictionary
    """
    try:
        conn = psycopg2.connect("dbname='{0}'".format(DATABASE))
        cur = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
        cur.execute(sql.SQL("SELECT * FROM {} ORDER BY index").format(sql.Identifier(relation_name)))
        relation = cur.fetchall()
        cur.close()
        return relation
    except Exception as e:
        print(e)
Example #23
Source File: openupgrade_80.py From openupgradelib with GNU Affero General Public License v3.0 | 5 votes |
def set_message_last_post(cr, uid, pool, models):
    """
    Given a list of models, set their 'message_last_post' fields to an
    estimated last post datetime.
    To be called in post-migration scripts

    :param cr: database cursor
    :param uid: user id, assumed to be openerp.SUPERUSER_ID
    :param pool: orm pool, assumed to be openerp.pooler.get_pool(cr.dbname)
    :param models: a list of model names for which 'message_last_post' needs \
    to be filled
    :return:
    """
    if type(models) is not list:
        models = [models]
    for model in models:
        model_pool = pool[model]
        query = sql.SQL("""
            UPDATE {table} main
            SET message_last_post = mm.last_date
            FROM (SELECT res_id, MAX(date) AS last_date
                  FROM mail_message
                  WHERE model = %s AND date IS NOT NULL
                  GROUP BY res_id) AS mm
            WHERE main.id = mm.res_id
        """).format(table=sql.Identifier(model_pool._table))
        logged_query(cr, query, (model,))
Example #24
Source File: column_queries.py From ontask_b with MIT License | 5 votes |
def add_column_to_db(
    table_name: str,
    col_name: str,
    col_type: str,
    initial=None,
):
    """Add an extra column of the given type with initial value.

    :param table_name: Table to consider
    :param col_name: Column name
    :param col_type: OnTask column type
    :param initial: initial value
    :return: Nothing. Effect done in the DB
    """
    sql_type = ontask_to_sql_datatype_names[col_type]

    query_skel = 'ALTER TABLE {0} ADD COLUMN {1} ' + sql_type
    query = sql.SQL(query_skel).format(
        sql.Identifier(table_name),
        sql.Identifier(col_name),
        sql.Literal(initial),
    )

    if initial is not None:
        query = query + sql.SQL(' DEFAULT ') + sql.Literal(initial)

    connection.connection.cursor().execute(query)
Example #25
Source File: openupgrade.py From openupgradelib with GNU Affero General Public License v3.0 | 5 votes |
def remove_tables_fks(cr, tables):
    """Remove foreign keys declared in ``tables``.

    This is useful when a table is not going to be used anymore, but you
    still don't want to delete it.

    If you keep FKs in that table, it will still get modifications when
    other tables are modified too; but if you're keeping that table as a
    log, that is a problem. Also, if some of the FK has no index, it could
    slow down deletion in other tables, even when this one has no more use.

    .. HINT::
        This method removes FKs that are *declared* in ``tables``,
        **not** FKs that *point* to those tables.

    :param [str, ...] tables: List of tables where the FKs were declared,
        and where they will be removed too. If a table doesn't exist, it is
        skipped.
    """
    drop_sql = sql.SQL("ALTER TABLE {} DROP CONSTRAINT {}")
    for table in tables:
        cr.execute(
            """
            SELECT constraint_name
            FROM information_schema.table_constraints
            WHERE constraint_type = 'FOREIGN KEY'
                AND table_name = %s
            """,
            (table,),
        )
        for constraint in (row[0] for row in cr.fetchall()):
            logged_query(
                cr,
                drop_sql.format(
                    sql.Identifier(table),
                    sql.Identifier(constraint),
                ),
            )
Example #26
Source File: table_queries.py From ontask_b with MIT License | 5 votes |
def delete_table(table_name: str):
    """Delete the given table.

    :param table_name: Table to delete
    :return: Drop the table in the DB
    """
    query = sql.SQL('DROP TABLE IF EXISTS {0}').format(
        sql.Identifier(table_name))
    try:
        with connection.connection.cursor() as cursor:
            cursor.execute(query)
    except Exception as exc:
        LOGGER.error('Error when dropping table %s: %s', table_name, str(exc))
Example #27
Source File: table_queries.py From ontask_b with MIT License | 5 votes |
def get_select_query(
    table_name: str,
    column_names: Optional[List[str]] = None,
    filter_formula: Optional[Dict] = None,
    filter_pairs: Optional[Mapping] = None,
) -> Tuple[sql.Composed, List[Any]]:
    """Calculate pair query, fields to execute a select statement.

    :param table_name: Table to query
    :param column_names: list of columns to consider or None to consider all
    :param filter_formula: Text filter expression
    :param filter_pairs: Dictionary of key/value pairs.
    :return: (sql query, sql params)
    """
    if column_names:
        query = sql.SQL('SELECT {0} FROM {1}').format(
            sql.SQL(', ').join([
                OnTaskDBIdentifier(cname) for cname in column_names
            ]),
            sql.Identifier(table_name),
        )
    else:
        query = sql.SQL('SELECT * FROM {0}').format(sql.Identifier(table_name))

    query_fields = []
    if filter_formula or filter_pairs:
        bool_clause, query_fields = get_boolean_clause(
            filter_formula=filter_formula,
            filter_pairs=filter_pairs,
        )
        if bool_clause:
            query = query + sql.SQL(' WHERE ') + bool_clause

    return query, query_fields
Example #28
Source File: table_queries.py From ontask_b with MIT License | 5 votes |
def rename_table(table: str, new_name: str):
    """Rename a table in the database.

    :param table: Current table name
    :param new_name: New table name
    :return: Nothing. Change reflected in the database table
    """
    with connection.connection.cursor() as cursor:
        cursor.execute(sql.SQL('ALTER TABLE {0} RENAME TO {1}').format(
            sql.Identifier(table),
            sql.Identifier(new_name),
        ))
Example #29
Source File: table_queries.py From ontask_b with MIT License | 5 votes |
def clone_table(table_from: str, table_to: str):
    """Clone a table in the database.

    :param table_from: Source table.
    :param table_to: New table.
    :return: Nothing. Effect in DB
    """
    with connection.connection.cursor() as cursor:
        cursor.execute(sql.SQL('CREATE TABLE {0} AS TABLE {1}').format(
            sql.Identifier(table_to),
            sql.Identifier(table_from)))
Example #30
Source File: test_migrations.py From target-postgres with GNU Affero General Public License v3.0 | 5 votes |
def table_length(schema, table):
    with psycopg2.connect(**TEST_DB) as conn:
        with conn.cursor() as cur:
            cur.execute(sql.SQL(
                "SELECT count(*) FROM {}.{}"
            ).format(
                sql.Identifier(schema),
                sql.Identifier(table)))
            return cur.fetchone()[0]