Python alembic.op.bulk_insert() Examples
The following are 30 code examples of alembic.op.bulk_insert().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions/classes of the module alembic.op, or try the search function.
Example #1
Source File: e3db52a480f8_alter_log_data_type.py From chainerui with MIT License | 8 votes |
def upgrade():
    """Convert log.data from JSON text to msgpack-encoded LargeBinary.

    Creates temp_log with the new column type, copies every existing row
    across (re-encoding data), then drops the old table and renames.
    """
    conn = op.get_bind()
    temp_log_table = op.create_table(
        'temp_log',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('result_id', sa.Integer(), nullable=True),
        sa.Column('data', sa.LargeBinary(length=2048), nullable=True),
        sa.ForeignKeyConstraint(['result_id'], ['result.id'], ),
        sa.PrimaryKeyConstraint('id'))
    res = conn.execute('SELECT id, result_id, data FROM log')
    results = res.fetchall()
    if results:  # idiomatic truthiness check instead of len(...) > 0
        modified_logs = [{
            'id': r[0],
            'result_id': r[1],
            # Old rows store JSON text; re-encode as msgpack bytes.
            'data': msgpack.packb(json.loads(r[2]), use_bin_type=True)}
            for r in results]
        op.bulk_insert(temp_log_table, modified_logs)
    op.drop_table('log')
    op.rename_table('temp_log', 'log')
Example #2
Source File: 6f7dfb241354_create_opendaylight_preiodic_task_table.py From networking-odl with Apache License 2.0 | 7 votes |
def upgrade():
    """Create the opendaylight_periodic_task table and seed its task rows."""
    periodic_table = op.create_table(
        'opendaylight_periodic_task',
        sa.Column('state',
                  sa.Enum(odl_const.PENDING, odl_const.PROCESSING, name='state'),
                  nullable=False),
        sa.Column('processing_operation', sa.String(70)),
        sa.Column('task', sa.String(70), primary_key=True),
        sa.Column('lock_updated', sa.TIMESTAMP, nullable=False,
                  server_default=sa.func.now(), onupdate=sa.func.now()))
    # Every known periodic task starts out in the PENDING state.
    seed_rows = [{'task': task_name, 'state': odl_const.PENDING}
                 for task_name in ('maintenance', 'hostconfig')]
    op.bulk_insert(periodic_table, seed_rows)
Example #3
Source File: 0306_letter_rates_price_rise.py From notifications-api with MIT License | 6 votes |
def upgrade():
    """Close out current letter rates and insert the post-price-rise rates."""
    # Terminate every open-ended rate at the changeover moment.
    bind = op.get_bind()
    bind.execute(
        text("UPDATE letter_rates SET end_date = :start WHERE end_date IS NULL"),
        start=CHANGEOVER_DATE)
    base_prices = {
        'second': 30,
        'first': 56,
    }
    # One new rate per (sheet_count, crown, post_class) combination;
    # explicit nested loops replace itertools.product, same iteration order.
    new_rates = []
    for sheet_count in range(1, 6):
        for crown in (True, False):
            for post_class in ('first', 'second'):
                new_rates.append({
                    'id': uuid.uuid4(),
                    'start_date': CHANGEOVER_DATE,
                    'end_date': None,
                    'sheet_count': sheet_count,
                    # pence -> pounds: base price plus 5p per sheet
                    'rate': (base_prices[post_class] + (5 * sheet_count)) / 100.0,
                    'crown': crown,
                    'post_class': post_class,
                })
    op.bulk_insert(LetterRate.__table__, new_rates)
Example #4
Source File: a5ed252d562f_create_comic_entry_for_davila_in_faro_.py From stampython with GNU General Public License v3.0 | 6 votes |
def upgrade():
    """Seed the comic table with the 'davila' entry (Faro de Vigo)."""
    fields = ('name', 'type', 'channelgid', 'lastchecked', 'url',
              'imgxpath', 'txtxpath')
    comic_table = table('comic', *(sa.Column(field, sa.Text) for field in fields))
    row = {
        'name': 'davila',
        'type': 'url',
        'channelgid': '-1001069507044',
        'lastchecked': '1981/01/24',
        'url': 'http://fotos.farodevigo.es/vinyetas/#year#/#month#/#day#/davila.jpg',
        'imgxpath': 'False',
        'txtxpath': 'False',
    }
    op.bulk_insert(comic_table, [row])
Example #5
Source File: 55874a4ceed6_add_l7policy_action_redirect_prefix.py From octavia with Apache License 2.0 | 6 votes |
def upgrade():
    """Add l7policy.redirect_prefix and register the REDIRECT_PREFIX action."""
    op.add_column(
        u'l7policy',
        sa.Column(u'redirect_prefix', sa.String(255), nullable=True))
    action_table = sql.table(
        u'l7policy_action',
        sql.column(u'name', sa.String),
        sql.column(u'description', sa.String))
    op.bulk_insert(action_table, [{'name': 'REDIRECT_PREFIX'}])
Example #6
Source File: 495fea37afc5_create_comic_entry_for_bernardo_vergara.py From stampython with GNU General Public License v3.0 | 6 votes |
def upgrade():
    """Seed the comic table with the 'vergara' RSS entry (eldiario.es)."""
    fields = ('name', 'type', 'channelgid', 'lastchecked', 'url')
    comic_table = table('comic', *(sa.Column(field, sa.Text) for field in fields))
    row = {
        'name': 'vergara',
        'type': 'rss',
        'channelgid': '-1001105187138',
        'lastchecked': '1981/01/24',
        'url': 'http://www.eldiario.es/rss/section/20038/',
    }
    op.bulk_insert(comic_table, [row])
Example #7
Source File: 214568dbb0b7_feed_quico_jubilata_comic_data.py From stampython with GNU General Public License v3.0 | 6 votes |
def upgrade():
    """Seed the comic table with the 'quico' (Quico Jubilata) entry."""
    fields = ('name', 'type', 'channelgid', 'lastchecked', 'url',
              'imgxpath', 'txtxpath')
    comic_table = table('comic', *(sa.Column(field, sa.Text) for field in fields))
    row = {
        'name': 'quico',
        'type': 'url',
        'channelgid': '-1001105187138',
        'lastchecked': '1981/01/24',
        'url': 'http://www.quicojubilata.com/quico-jubilata/#year#-#month#-#day#',
        'imgxpath': '//img[@class="img-responsive"]/@src',
        'txtxpath': '//h1[@class="js-quickedit-page-title page-header"]/span/text()',
    }
    op.bulk_insert(comic_table, [row])
Example #8
Source File: afe311f9077d_create_comic_entry_for_el_jueves.py From stampython with GNU General Public License v3.0 | 6 votes |
def upgrade():
    """Seed the comic table with the 'jueves' RSS entry (El Jueves)."""
    fields = ('name', 'type', 'channelgid', 'lastchecked', 'url')
    comic_table = table('comic', *(sa.Column(field, sa.Text) for field in fields))
    row = {
        'name': 'jueves',
        'type': 'rss',
        'channelgid': '-1001105187138',
        'lastchecked': '1981/01/24',
        'url': 'http://www.eljueves.es/feeds/vineta-del-dia.html',
    }
    op.bulk_insert(comic_table, [row])
Example #9
Source File: ccb959ea2b27_feed_dilbert_comic_data.py From stampython with GNU General Public License v3.0 | 6 votes |
def upgrade():
    """Seed the comic table with the 'dilbert' entry."""
    fields = ('name', 'type', 'channelgid', 'lastchecked', 'url',
              'imgxpath', 'txtxpath')
    comic_table = table('comic', *(sa.Column(field, sa.Text) for field in fields))
    row = {
        'name': 'dilbert',
        'type': 'url',
        'channelgid': '-1001066352913',
        'lastchecked': '1981/01/24',
        'url': 'http://dilbert.com/strip/#year#-#month#-#day#',
        'imgxpath': '//img[@class="img-responsive img-comic"]/@src',
        'txtxpath': '//img[@class="img-responsive img-comic"]/@alt',
    }
    op.bulk_insert(comic_table, [row])
Example #10
Source File: b73beec49812_create_comic_entry_for_forges_in_elpais.py From stampython with GNU General Public License v3.0 | 6 votes |
def upgrade():
    """Seed the comic table with the 'forges' entry (El País RSS)."""
    fields = ('name', 'type', 'channelgid', 'lastchecked', 'url',
              'imgxpath', 'txtxpath')
    comic_table = table('comic', *(sa.Column(field, sa.Text) for field in fields))
    row = {
        'name': 'forges',
        'type': 'rssurl',
        'channelgid': '-1001105187138',
        'lastchecked': '1981/01/24',
        'url': 'http://ep00.epimg.net/rss/tags/a_antonio_fraguas_forges_a.xml',
        'imgxpath': '//div[@id="articulo_contenedor"]//img/@src',
        'txtxpath': '//div[@id="articulo_contenedor"]//img/@alt',
    }
    op.bulk_insert(comic_table, [row])
Example #11
Source File: 03bbe0bc1832_create_comic_entry_for_mel.py From stampython with GNU General Public License v3.0 | 6 votes |
def upgrade():
    """Seed the comic table with the 'mel' RSS entry."""
    fields = ('name', 'type', 'channelgid', 'lastchecked', 'url')
    comic_table = table('comic', *(sa.Column(field, sa.Text) for field in fields))
    row = {
        'name': 'mel',
        'type': 'rss',
        'channelgid': '-1001105187138',
        'lastchecked': '1981/01/24',
        'url': 'http://elchistedemel.blogspot.com/feeds/posts/default',
    }
    op.bulk_insert(comic_table, [row])
Example #12
Source File: 6581184f2314_create_comic_entry_for_o_bichero.py From stampython with GNU General Public License v3.0 | 6 votes |
def upgrade():
    """Seed the comic table with the 'obichero' RSS entry."""
    fields = ('name', 'type', 'channelgid', 'lastchecked', 'url')
    comic_table = table('comic', *(sa.Column(field, sa.Text) for field in fields))
    row = {
        'name': 'obichero',
        'type': 'rss',
        'channelgid': '-1001069507044',
        'lastchecked': '1981/01/24',
        'url': 'http://obichero.blogspot.com/feeds/posts/default',
    }
    op.bulk_insert(comic_table, [row])
Example #13
Source File: 447b2802474f_seed_correlations.py From whiskyton with MIT License | 6 votes |
def upgrade():
    """Seed the correlation table with a correlation row per whisky pair.

    Fix: the seen-pair check used a list, making membership O(n) inside the
    already O(n^2) pair loop; a set gives O(1) lookups.
    """
    whiskies = Whisky.query.all()
    seen_pairs = set()
    data = []
    # Loop twice to compare all whiskies against each other.
    for reference in whiskies:
        for whisky in whiskies:
            # Add correlation only once per pair, skipping self-pairs.
            item = (whisky.id, reference.id)
            if item not in seen_pairs and whisky.id != reference.id:
                data.append(reference.get_correlation(whisky))
                seen_pairs.add(item)
    # Bulk insert all computed correlations in one statement.
    op.bulk_insert(Correlation.__table__, data)
Example #14
Source File: a224268bda6b_feed_garfield_comic_data.py From stampython with GNU General Public License v3.0 | 6 votes |
def upgrade():
    """Seed the comic table with the 'garfield' entry."""
    fields = ('name', 'type', 'channelgid', 'lastchecked', 'url',
              'imgxpath', 'txtxpath')
    comic_table = table('comic', *(sa.Column(field, sa.Text) for field in fields))
    row = {
        'name': 'garfield',
        'type': 'url',
        'channelgid': '-1001105187138',
        'lastchecked': '1981/01/24',
        'url': 'https://s3.amazonaws.com/static.garfield.com/comics/garfield/#year#/#year#-#month#-#day#.gif',
        'imgxpath': 'False',
        'txtxpath': 'False',
    }
    op.bulk_insert(comic_table, [row])
Example #15
Source File: e3db52a480f8_alter_log_data_type.py From chainerui with MIT License | 6 votes |
def downgrade():
    """Revert log.data from msgpack LargeBinary back to JSON text.

    Mirror of upgrade(): builds temp_log with the old String type, copies
    rows across (decoding msgpack to JSON), then swaps the table in.
    """
    conn = op.get_bind()
    temp_log_table = op.create_table(
        'temp_log',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('result_id', sa.Integer(), nullable=True),
        sa.Column('data', sa.String(length=1024), nullable=True),
        sa.ForeignKeyConstraint(['result_id'], ['result.id'], ),
        sa.PrimaryKeyConstraint('id'))
    res = conn.execute('SELECT id, result_id, data FROM log')
    results = res.fetchall()
    if results:  # idiomatic truthiness check instead of len(...) > 0
        modified_logs = [{
            'id': r[0],
            'result_id': r[1],
            # Decode msgpack bytes back into a JSON string.
            'data': json.dumps(msgpack.unpackb(r[2], raw=False))}
            for r in results]
        op.bulk_insert(temp_log_table, modified_logs)
    op.drop_table('log')
    op.rename_table('temp_log', 'log')
Example #16
Source File: 16_a1ed2f75cb13_.py From betterlifepsi with MIT License | 6 votes |
def upgrade():
    """Insert the import_store_data role and restart the id sequence.

    Fix: on an empty role table, SELECT max(id)+1 yields NULL, so the old
    code set cm = None and crashed on str(cm + 1); keep the 46 fallback.
    """
    ### commands auto generated by Alembic - please adjust! ###
    role_table = sa.table(
        'role',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('name', sa.String(length=80), nullable=False),
        sa.Column('description', sa.String(length=255), nullable=True),
        sa.Column('parent_id', sa.Integer(), nullable=True),
    )
    res = op.get_bind().execute('SELECT max(id)+1 FROM role')
    cm = 46  # fallback id when the role table is empty
    for r in res.fetchall():
        if r[0] is not None:  # guard against NULL from max() on empty table
            cm = r[0]
    op.bulk_insert(role_table, [
        {'id': cm, 'name': 'import_store_data',
         'description': u'导入店铺运营数据', 'parent_id': None},
    ], multiinsert=False)
    from sqlalchemy.sql import text
    # Keep the PostgreSQL sequence ahead of the explicitly-inserted id.
    op.get_bind().execute(text(
        "ALTER SEQUENCE role_id_seq RESTART WITH " + str(cm + 1) + ";"))
    ### end Alembic commands ###
Example #17
Source File: 14d5e28d3626_lecture_assistants.py From muesli with GNU General Public License v3.0 | 6 votes |
def upgrade():
    """Move lectures.assistant into the new lecture_assistants join table.

    ### Caution: Only works in online mode!!!
    """
    op.create_table('lecture_assistants',
                    sa.Column('lecture', sa.Integer(), nullable=False),
                    sa.Column('assistant', sa.Integer(), nullable=False),
                    sa.ForeignKeyConstraint(['assistant'], ['users.id'], ),
                    sa.ForeignKeyConstraint(['lecture'], ['lectures.id'], ),
                    sa.PrimaryKeyConstraint('lecture', 'assistant')
                    )
    lectures = sa.sql.table('lectures',
                            sa.Column('id', sa.Integer),
                            sa.Column('assistant', sa.Integer)
                            )
    connection = op.get_bind()
    results = connection.execute(lectures.select())
    ins = []
    for res in results:
        # PEP 8: compare against None with `is not`, never `!=`.
        if res.assistant is not None:
            ins.append({'lecture': res.id, 'assistant': res.assistant})
    lecture_assistants = sa.sql.table(
        'lecture_assistants',
        sa.Column('lecture', sa.Integer(), nullable=False),
        sa.Column('assistant', sa.Integer(), nullable=False)
    )
    if ins:
        op.bulk_insert(lecture_assistants, ins)
Example #18
Source File: 1f0bd302c1a6_add_availability_zones_table.py From manila with Apache License 2.0 | 6 votes |
def collect_existing_az_from_services_table(connection, services_table,
                                            az_table):
    """Insert one AZ row per distinct service availability zone.

    Returns a mapping of zone name to the freshly generated uuid id.
    """
    zone_ids = dict()
    new_rows = []
    for svc in connection.execute(services_table.select()):
        zone = svc.availability_zone
        if zone not in zone_ids:
            generated_id = uuidutils.generate_uuid()
            zone_ids[zone] = generated_id
            new_rows.append({
                'created_at': timeutils.utcnow(),
                'id': generated_id,
                'name': zone,
            })
    op.bulk_insert(az_table, new_rows)
    return zone_ids
Example #19
Source File: test_bulk_insert.py From alembic with MIT License | 6 votes |
def test_bulk_insert_from_new_table(self):
    """bulk_insert against a table created in the same migration renders
    CREATE TABLE followed by one literal INSERT per row (postgresql)."""
    context = op_fixture("postgresql", True)
    new_table = op.create_table(
        "ins_table",
        Column("id", Integer),
        Column("v1", String()),
        Column("v2", String()),
    )
    rows = [
        {"id": 1, "v1": "row v1", "v2": "row v5"},
        {"id": 2, "v1": "row v2", "v2": "row v6"},
    ]
    op.bulk_insert(new_table, rows)
    context.assert_(
        "CREATE TABLE ins_table (id INTEGER, v1 VARCHAR, v2 VARCHAR)",
        "INSERT INTO ins_table (id, v1, v2) VALUES "
        "(1, 'row v1', 'row v5')",
        "INSERT INTO ins_table (id, v1, v2) VALUES "
        "(2, 'row v2', 'row v6')",
    )
Example #20
Source File: test_bulk_insert.py From alembic with MIT License | 6 votes |
def test_bulk_insert_inline_literal(self):
    """inline_literal values round-trip through bulk_insert when
    multiinsert is disabled."""
    class MyType(TypeEngine):
        pass

    foo_table = table("foo", column("id", Integer), column("data", MyType()))
    rows = [
        {"id": 1, "data": self.op.inline_literal("d1")},
        {"id": 2, "data": self.op.inline_literal("d2")},
    ]
    self.op.bulk_insert(foo_table, rows, multiinsert=False)
    fetched = self.conn.execute(text("select id, data from foo")).fetchall()
    eq_(fetched, [(1, "d1"), (2, "d2")])
Example #21
Source File: 703dbf02afde_add_journal_maintenance_table.py From networking-odl with Apache License 2.0 | 6 votes |
def upgrade():
    """Create opendaylight_maintenance and seed its single lock row."""
    maint_table = op.create_table(
        'opendaylight_maintenance',
        sa.Column('id', sa.String(36), primary_key=True),
        sa.Column('state',
                  sa.Enum(odl_const.PENDING, odl_const.PROCESSING, name='state'),
                  nullable=False),
        sa.Column('processing_operation', sa.String(70)),
        sa.Column('lock_updated', sa.TIMESTAMP, nullable=False,
                  server_default=sa.func.now(), onupdate=sa.func.now()))
    # The only row; used to synchronize the lock between different
    # Neutron processes.
    lock_row = {'id': uuidutils.generate_uuid(), 'state': odl_const.PENDING}
    op.bulk_insert(maint_table, [lock_row])
Example #22
Source File: 0299_org_types_table.py From notifications-api with MIT License | 5 votes |
def upgrade():
    """Create organisation_types, seed it, and make services.crown nullable."""
    org_types_table = op.create_table(
        'organisation_types',
        sa.Column('name', sa.String(), nullable=False),
        sa.PrimaryKeyConstraint('name'),
        sa.Column('is_crown', sa.Boolean, nullable=True),
        sa.Column('annual_free_sms_fragment_limit', sa.BigInteger,
                  nullable=False))
    # (name, is_crown, annual free SMS fragment limit)
    seed = [
        ("central", None, 250000),
        ("local", False, 25000),
        ("nhs", None, 25000),
        ("nhs_central", True, 250000),
        ("nhs_local", False, 25000),
        ("emergency_service", False, 25000),
        ("school_or_college", False, 25000),
        ("other", None, 25000),
    ]
    rows = []
    for name, is_crown, sms_limit in seed:
        rows.append({
            'name': name,
            'is_crown': is_crown,
            'annual_free_sms_fragment_limit': sms_limit,
        })
    op.bulk_insert(org_types_table, rows)
    op.alter_column('services', 'crown', nullable=True)
    op.alter_column('services_history', 'crown', nullable=True)
Example #23
Source File: 56cdbe267881_add_share_export_locations_table.py From manila with Apache License 2.0 | 5 votes |
def upgrade():
    """Create share_export_locations and copy shares.export_location into it.

    Fix: the original ended with session.close_all() — a deprecated
    classmethod invoked via an instance, which closes *all* sessions.
    Close just this session, and do so even if the copy fails.
    """
    export_locations_table = op.create_table(
        'share_export_locations',
        sa.Column('id', sa.Integer, primary_key=True, nullable=False),
        sa.Column('created_at', sa.DateTime),
        sa.Column('updated_at', sa.DateTime),
        sa.Column('deleted_at', sa.DateTime),
        sa.Column('deleted', sa.Integer, default=0),
        sa.Column('path', sa.String(2000)),
        sa.Column('share_id', sa.String(36),
                  sa.ForeignKey('shares.id', name="sel_id_fk")),
        mysql_engine='InnoDB',
        mysql_charset='utf8')

    shares_table = table(
        'shares',
        sa.Column('created_at', sa.DateTime),
        sa.Column('deleted_at', sa.DateTime),
        sa.Column('deleted', sa.Integer),
        sa.Column('export_location', sa.String(length=255)),
        sa.Column('id', sa.String(length=36)),
        sa.Column('updated_at', sa.DateTime))

    export_locations = []
    session = sa.orm.Session(bind=op.get_bind().connect())
    try:
        for share in session.query(shares_table).all():
            # Legacy rows may hold a non-int 'deleted' marker; normalize to 0.
            deleted = share.deleted if isinstance(share.deleted, int) else 0
            export_locations.append({
                'created_at': share.created_at,
                'updated_at': share.updated_at,
                'deleted_at': share.deleted_at,
                'deleted': deleted,
                'share_id': share.id,
                'path': share.export_location,
            })
        op.bulk_insert(export_locations_table, export_locations)
        op.drop_column('shares', 'export_location')
    finally:
        session.close()
Example #24
Source File: 205fd513c96_added_taskconfig.py From freight with Apache License 2.0 | 5 votes |
def upgrade():
    """Create taskconfig and copy each app's provider config into it."""
    taskconfig_table = op.create_table(
        "taskconfig",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("app_id", sa.Integer(), nullable=False),
        sa.Column("provider", sa.String(length=64), nullable=False),
        sa.Column("type", sa.Integer(), nullable=True),
        sa.Column("data", freight.db.types.json.JSONEncodedDict(), nullable=True),
        sa.ForeignKeyConstraint(["app_id"], ["app.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("app_id", "type", name="unq_app_id_type"),
    )
    op.create_index("idx_taskconfig_app_id", "taskconfig", ["app_id"], unique=False)
    op.create_index("idx_taskconfig_type", "taskconfig", ["type"], unique=False)

    bind = op.get_bind()
    app_table = table(
        "app",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("provider", sa.String(length=54), nullable=False),
        sa.Column("data", freight.db.types.json.JSONEncodedDict(), nullable=True),
    )
    # Copy the existing configs out of the App table and into TaskConfigs.
    for app in bind.execute(app_table.select()):
        print(f"Migrating App id={app.id}")
        op.bulk_insert(
            taskconfig_table,
            [{"app_id": app.id, "type": 0, "provider": app.provider, "data": app.data}],
        )
Example #25
Source File: dcf88e59aae4_add_lb_algorithm_source_ip_port.py From octavia with Apache License 2.0 | 5 votes |
def upgrade():
    """Register the SOURCE_IP_PORT algorithm name, then migrate pools."""
    algorithm_table = sa.table(
        u'algorithm',
        sa.column(u'name', sa.String(255)),
    )
    op.bulk_insert(algorithm_table, [{'name': 'SOURCE_IP_PORT'}])
    migrate_pools()
Example #26
Source File: 4a666113c7bb_add_operator_role.py From PowerDNS-Admin with MIT License | 5 votes |
def update_data():
    """Seed operator-role related settings and the Operator role itself."""
    setting_table = sa.sql.table(
        'setting',
        sa.sql.column('id', sa.Integer),
        sa.sql.column('name', sa.String),
        sa.sql.column('value', sa.String),
        sa.sql.column('view', sa.String))
    # New settings introduced alongside the operator role.
    new_settings = [
        {'id': 44, 'name': 'ldap_operator_group', 'value': '',
         'view': 'authentication'},
        {'id': 45, 'name': 'allow_user_create_domain', 'value': 'False',
         'view': 'basic'},
        {'id': 46, 'name': 'record_quick_edit', 'value': 'True',
         'view': 'basic'},
    ]
    op.bulk_insert(setting_table, new_settings)

    role_table = sa.sql.table(
        'role',
        sa.sql.column('id', sa.Integer),
        sa.sql.column('name', sa.String),
        sa.sql.column('description', sa.String))
    # The new Operator role.
    op.bulk_insert(role_table,
                   [{'id': 3, 'name': 'Operator', 'description': 'Operator'}])
Example #27
Source File: d8b801498850_remove_stream_action_types.py From monasca-api with Apache License 2.0 | 5 votes |
def downgrade():
    """Recreate the stream_actions_action_type lookup table and its rows."""
    action_type_table = op.create_table(
        'stream_actions_action_type',
        sa.Column('name', sa.String(length=20), nullable=False),
        sa.PrimaryKeyConstraint('name'),
        mysql_charset='utf8mb4',
        mysql_collate='utf8mb4_unicode_ci')
    rows = [{'name': action} for action in ('FIRE', 'EXPIRE')]
    op.bulk_insert(action_type_table, rows)
Example #28
Source File: 007_per_provider_table_split.py From sync-engine with GNU Affero General Public License v3.0 | 5 votes |
def genericize_imapaccount():
    """Split imapaccount into a generic 'account' table plus a provider-
    specific 'imapaccount' table, preserving existing imap_host values.
    """
    from inbox.models.session import session_scope
    from inbox.ignition import main_engine
    engine = main_engine(pool_size=1, max_overflow=0)
    Base = declarative_base()
    # Reflect the live schema so the old column can still be queried.
    Base.metadata.reflect(engine)

    class ImapAccount_(Base):
        __table__ = Base.metadata.tables['imapaccount']

    # Get data from columns-to-be-dropped
    with session_scope() as db_session:
        results = db_session.query(ImapAccount_.id,
                                   ImapAccount_.imap_host).all()
        to_insert = [dict(id=r[0], imap_host=r[1]) for r in results]

    # Rename table, add new columns.
    op.rename_table('imapaccount', 'account')
    op.add_column('account', sa.Column('type', sa.String(16)))

    # Create new table, insert data
    # The table
    op.create_table('imapaccount',
                    sa.Column('imap_host', sa.String(512)),
                    sa.Column('id', sa.Integer()),
                    sa.ForeignKeyConstraint(['id'], ['account.id'],
                                            ondelete='CASCADE'),
                    sa.PrimaryKeyConstraint('id'))

    # The ad-hoc table for insert
    table_ = table('imapaccount',
                   column('imap_host', sa.String()),
                   column('id', sa.Integer))
    if to_insert:
        op.bulk_insert(table_, to_insert)

    # Drop columns now
    op.drop_column('account', 'imap_host')
Example #29
Source File: 007_per_provider_table_split.py From sync-engine with GNU Affero General Public License v3.0 | 5 votes |
def genericize_thread():
    """Split thread into a generic 'thread' table plus a provider-specific
    'imapthread' table, preserving existing g_thrid values.
    """
    from inbox.models.session import session_scope
    from inbox.ignition import main_engine
    engine = main_engine(pool_size=1, max_overflow=0)
    Base = declarative_base()
    # Reflect the live schema so the old column can still be queried.
    Base.metadata.reflect(engine)

    class Thread_(Base):
        __table__ = Base.metadata.tables['thread']

    # Get data from columns-to-be-dropped
    with session_scope() as db_session:
        results = db_session.query(Thread_.id, Thread_.g_thrid).all()
        to_insert = [dict(id=r[0], g_thrid=r[1]) for r in results]

    # Add new columns
    op.add_column('thread', sa.Column('type', sa.String(16)))

    # Create new table, insert data
    # The table
    op.create_table('imapthread',
                    sa.Column('g_thrid', sa.BigInteger(), nullable=True,
                              index=True),
                    sa.Column('id', sa.Integer()),
                    sa.ForeignKeyConstraint(['id'], ['thread.id'],
                                            ondelete='CASCADE'),
                    sa.PrimaryKeyConstraint('id'))

    # The ad-hoc table for insert
    table_ = table('imapthread',
                   column('g_thrid', sa.BigInteger),
                   column('id', sa.Integer))
    if to_insert:
        op.bulk_insert(table_, to_insert)

    # Drop columns now
    op.drop_column('thread', 'g_thrid')
Example #30
Source File: e78274be170e_create_organizations_table.py From commandment with MIT License | 5 votes |
def data_upgrades():
    """Seed the organizations table with the demo organization row."""
    # TABLE[:-1] drops the last element of the module-level TABLE tuple —
    # presumably a non-column argument; verify against TABLE's definition.
    org_table = table(*TABLE[:-1])
    op.bulk_insert(org_table, [DEMO_ORGANIZATION])