Python sqlalchemy.Date() Examples
The following are 30 code examples of sqlalchemy.Date().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions and classes of the module sqlalchemy, or try the search function.
Example #1
Source File: 0005_add_provider_stats.py From notifications-api with MIT License | 6 votes |
def upgrade():
    """Create the provider_rates and provider_statistics tables, plus an
    index on provider_statistics.service_id."""
    op.create_table(
        'provider_rates',
        sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('valid_from', sa.DateTime(), nullable=False),
        sa.Column('provider', sa.Enum('mmg', 'twilio', 'firetext', 'ses', name='providers'), nullable=False),
        sa.Column('rate', sa.Numeric(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_table(
        'provider_statistics',
        sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('day', sa.Date(), nullable=False),
        sa.Column('provider', sa.Enum('mmg', 'twilio', 'firetext', 'ses', name='providers'), nullable=False),
        sa.Column('service_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('unit_count', sa.BigInteger(), nullable=False),
        sa.ForeignKeyConstraint(['service_id'], ['services.id']),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_index(op.f('ix_provider_statistics_service_id'), 'provider_statistics', ['service_id'], unique=False)
Example #2
Source File: helpers.py From planespotter with MIT License | 6 votes |
def strings_to_dates(model, dictionary):
    """Return a copy of `dictionary` with date strings and integer intervals
    converted according to the column types of `model`.

    Keys are field names of `model`; values are the values to assign. For
    Date/DateTime fields, string values are parsed into `datetime.datetime`
    (or `datetime.date` for plain Date columns); special markers in
    `CURRENT_TIME_MARKERS` become the matching SQL function expression.
    Integer values for Interval fields become `datetime.timedelta` objects.
    The input dictionary is not modified.
    """
    converted = {}
    for name, raw in dictionary.items():
        if is_date_field(model, name) and raw is not None:
            if raw.strip() == '':
                # Blank string means "no value" for a date column.
                converted[name] = None
            elif raw in CURRENT_TIME_MARKERS:
                # e.g. CURRENT_TIMESTAMP -> func.current_timestamp()
                converted[name] = getattr(func, raw.lower())()
            else:
                as_datetime = parse_datetime(raw)
                converted[name] = as_datetime
                # Plain Date columns only take the date component.
                if isinstance(get_field_type(model, name), Date):
                    converted[name] = as_datetime.date()
        elif (is_interval_field(model, name) and raw is not None
              and isinstance(raw, int)):
            converted[name] = datetime.timedelta(seconds=raw)
        else:
            converted[name] = raw
    return converted
Example #3
Source File: test_types.py From sqlalchemy with MIT License | 6 votes |
def test_interval_coercion(self):
    """Interval arithmetic coercion: Interval + Date has DateTime affinity,
    Interval * Numeric keeps Interval affinity."""
    date_sum = column("bar", types.Interval) + column("foo", types.Date)
    eq_(date_sum.type._type_affinity, types.DateTime)

    scaled = column("bar", types.Interval) * column("foo", types.Numeric)
    eq_(scaled.type._type_affinity, types.Interval)
Example #4
Source File: test_functions.py From sqlalchemy with MIT License | 6 votes |
def test_conn_execute(self, connection):
    """current_date yields the same scalar whether run via execute(),
    a SELECT wrapper, scalar(), or a custom compiled FunctionElement."""
    from sqlalchemy.sql.expression import FunctionElement
    from sqlalchemy.ext.compiler import compiles

    class myfunc(FunctionElement):
        type = Date()

    @compiles(myfunc)
    def compile_(elem, compiler, **kw):
        # Delegate the custom element straight to current_date().
        return compiler.process(func.current_date())

    via_execute = connection.execute(func.current_date()).scalar()
    via_select = connection.execute(func.current_date().select()).scalar()
    via_scalar = connection.scalar(func.current_date())
    via_custom = connection.scalar(myfunc())
    assert (via_execute == via_select == via_scalar == via_custom) is True
Example #5
Source File: test_relationships.py From sqlalchemy with MIT License | 6 votes |
def define_tables(cls, metadata):
    """Define the `items` table with a four-column composite primary key,
    including a Date component, each mapped under an alternate `key`."""
    Table(
        "items",
        metadata,
        Column("item_policy_num", String(10), primary_key=True, key="policyNum"),
        Column("item_policy_eff_date", sa.Date, primary_key=True, key="policyEffDate"),
        Column("item_type", String(20), primary_key=True, key="type"),
        Column("item_id", Integer, primary_key=True, key="id", autoincrement=False),
    )
Example #6
Source File: test_functions.py From sqlalchemy with MIT License | 6 votes |
def test_extract_expression(self, connection):
    """extract() pulls the year from a DateTime column and the month from a
    Date column of a freshly created row."""
    meta = self.metadata
    table = Table("test", meta, Column("dt", DateTime), Column("d", Date))
    meta.create_all(connection)
    connection.execute(
        table.insert(),
        {
            "dt": datetime.datetime(2010, 5, 1, 12, 11, 10),
            "d": datetime.date(2010, 5, 1),
        },
    )
    result = connection.execute(
        select([extract("year", table.c.dt), extract("month", table.c.d)])
    )
    first_row = result.first()
    assert first_row[0] == 2010
    assert first_row[1] == 5
    result.close()
Example #7
Source File: test_eager_relations.py From sqlalchemy with MIT License | 6 votes |
def define_tables(cls, metadata):
    """Define `users` and `stuff` (which has a Date column and a foreign key
    back to users) for the eager-loading tests."""
    Table(
        "users",
        metadata,
        Column("id", Integer, primary_key=True, test_needs_autoincrement=True),
        Column("name", String(50)),
    )
    Table(
        "stuff",
        metadata,
        Column("id", Integer, primary_key=True, test_needs_autoincrement=True),
        Column("date", Date),
        Column("user_id", Integer, ForeignKey("users.id")),
    )
Example #8
Source File: 0979dfa2780a_create_rpg_account_table.py From aki with GNU Affero General Public License v3.0 | 5 votes |
def upgrade():
    """Create the rpg_accounts table keyed by auto-incrementing id, with a
    unique QQ number per account."""
    op.create_table(
        'rpg_accounts',
        sa.Column('id', sa.Integer, primary_key=True, autoincrement=True),
        sa.Column('qq_number', sa.BigInteger, nullable=False, unique=True),
        sa.Column('created_dt', sa.DateTime(timezone=True)),
        sa.Column('total_coins', sa.BigInteger),
        sa.Column('total_sign_in', sa.BigInteger),
        sa.Column('last_sign_in_date', sa.Date),
    )
Example #9
Source File: 2020_04_13_3a87adc2088b_user_statistics_tracking.py From ultimate-poll-bot with MIT License | 5 votes |
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Per-user, per-day usage counters; composite PK on (date, user_id).
    op.create_table(
        "user_statistic",
        sa.Column("date", sa.Date(), nullable=False),
        sa.Column("callback_calls", sa.Integer(), nullable=False),
        sa.Column("poll_callback_calls", sa.Integer(), nullable=False),
        sa.Column("created_polls", sa.Integer(), nullable=False),
        sa.Column("inline_shares", sa.Integer(), nullable=False),
        sa.Column("user_id", sa.BigInteger(), nullable=False),
        sa.ForeignKeyConstraint(
            ["user_id"], ["user.id"], name="user", ondelete="cascade"
        ),
        sa.PrimaryKeyConstraint("date", "user_id"),
    )
    op.create_index(
        op.f("ix_user_statistic_user_id"), "user_statistic", ["user_id"], unique=False
    )
    # Drop the server default from update.count.
    op.alter_column(
        "update",
        "count",
        existing_type=sa.INTEGER(),
        server_default=None,
        existing_nullable=False,
    )
    op.add_column(
        "user",
        sa.Column("banned", sa.Boolean(), server_default="FALSE", nullable=False),
    )
    # ### end Alembic commands ###
Example #10
Source File: 1d2ddd543133_log_dt.py From incubator-superset with Apache License 2.0 | 5 votes |
def upgrade():
    """Add a nullable Date column `dt` to the logs table."""
    dt_column = sa.Column("dt", sa.Date(), nullable=True)
    op.add_column("logs", dt_column)
Example #11
Source File: mapper.py From tableschema-sql-py with MIT License | 5 votes |
def restore_type(self, type):
    """Map a SQLAlchemy column type instance back to its Table Schema type
    name.

    Raises tableschema.exceptions.StorageError for unsupported types.
    """
    # Type mapping shared across all dialects.
    mapping = {
        ARRAY: 'array',
        sa.Boolean: 'boolean',
        sa.Date: 'date',
        sa.DateTime: 'datetime',
        sa.Float: 'number',
        sa.Integer: 'integer',
        JSONB: 'object',
        JSON: 'object',
        sa.Numeric: 'number',
        sa.Text: 'string',
        sa.Time: 'time',
        sa.VARCHAR: 'string',
        UUID: 'string',
    }
    # Scan the whole mapping; the last matching entry wins (this preserves
    # the original lookup semantics when a type matches several classes).
    matched = None
    for sa_type, ts_name in mapping.items():
        if isinstance(type, sa_type):
            matched = ts_name
    if matched is None:
        raise tableschema.exceptions.StorageError(
            'Type "%s" is not supported' % type)
    return matched

# Internal
Example #12
Source File: mapper.py From tableschema-sql-py with MIT License | 5 votes |
def convert_type(self, type):
    """Convert a Table Schema type name to a SQLAlchemy column type.

    Raises tableschema.exceptions.StorageError for unsupported type names.
    """
    # Default dialect mapping; None marks types with no generic SQL form.
    mapping = {
        'any': sa.Text,
        'array': None,
        'boolean': sa.Boolean,
        'date': sa.Date,
        'datetime': sa.DateTime,
        'duration': None,
        'geojson': None,
        'geopoint': None,
        'integer': sa.Integer,
        'number': sa.Float,
        'object': None,
        'string': sa.Text,
        'time': sa.Time,
        'year': sa.Integer,
        'yearmonth': None,
    }
    # PostgreSQL supports richer types via JSONB/Numeric overrides.
    if self.__dialect == 'postgresql':
        mapping.update({
            'array': JSONB,
            'geojson': JSONB,
            'number': sa.Numeric,
            'object': JSONB,
        })
    if type not in mapping:
        raise tableschema.exceptions.StorageError(
            'Field type "%s" is not supported' % type)
    return mapping[type]
Example #13
Source File: ChequeHistory.py From amir with GNU General Public License v3.0 | 5 votes |
def __init__(self, ChequeId, Amount, WrtDate, DueDate, Serial, Status, Cust,
             Account, TransId, Desc, Date, Delete=False, Id=1):
    """Initialize a cheque-history record from its raw field values.

    NOTE(review): `Delete` and `Id` are accepted but never stored — the
    `Delete` assignment is commented out in the original source; presumably
    intentional, but worth confirming with the callers.
    """
    self.ChequeId = ChequeId
    self.Amount = Amount
    self.WrtDate = WrtDate
    self.DueDate = DueDate
    self.Serial = Serial
    self.Status = Status
    self.Cust = Cust
    self.Account = Account
    self.TransId = TransId
    self.Desc = Desc
    self.Date = Date
    # self.Delete = Delete

## @}
Example #14
Source File: ecf3e6bf950e_added_application_settings_table_with_.py From Titan with GNU Affero General Public License v3.0 | 5 votes |
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Singleton settings row tracking donation-goal progress and deadline.
    op.create_table(
        'application_settings',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('donation_goal_progress', sa.Integer(), server_default='0', nullable=False),
        sa.Column('donation_goal_total', sa.Integer(), server_default='0', nullable=False),
        sa.Column('donation_goal_end', sa.Date(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    )
    # ### end Alembic commands ###
Example #15
Source File: orm.py From ShenzhenHouseInfoCrawler with Apache License 2.0 | 5 votes |
def __str__(self):
    """Render as '<type "NewHouseSourceProjectSummary">date, project'."""
    template = '<type "NewHouseSourceProjectSummary">{}, {}'
    return template.format(self.thedate, self.project_name)

# class TestId(Base):
#     __tablename__ = 'test_id'
#     id = Column(Integer, primary_key=True, autoincrement=True)
#     thedate = Column(Date)
#     region = Column(String(255), nullable=False, unique=True)
Example #16
Source File: 30bb17c0dc76_.py From incubator-superset with Apache License 2.0 | 5 votes |
def downgrade():
    """Restore the `dt` Date column on logs (batch mode for SQLite compat)."""
    with op.batch_alter_table("logs") as batch_op:
        # NOTE(review): date.today() is evaluated once when the migration
        # module is processed, not per-row — presumably intentional here.
        batch_op.add_column(sa.Column("dt", sa.Date, default=date.today()))
Example #17
Source File: 585fd56a9833_adding_resource_date_verified_column.py From radremedy with Mozilla Public License 2.0 | 5 votes |
def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    # Track when each resource was last verified.
    date_verified = sa.Column('date_verified', sa.Date(), nullable=True)
    op.add_column('resource', date_verified)
    ### end Alembic commands ###
Example #18
Source File: test_types.py From sqlalchemy with MIT License | 5 votes |
def test_python_type(self):
    """Each standard SQLAlchemy type reports the expected python_type;
    the TypeEngine base raises NotImplementedError."""
    expected_pairs = [
        (types.Integer(), int),
        (types.Numeric(), decimal.Decimal),
        (types.Numeric(asdecimal=False), float),
        (types.LargeBinary(), util.binary_type),
        (types.Float(), float),
        (types.Interval(), datetime.timedelta),
        (types.Date(), datetime.date),
        (types.DateTime(), datetime.datetime),
        (types.String(), str),
        (types.Unicode(), util.text_type),
        (types.Enum("one", "two", "three"), str),
    ]
    for type_obj, py_type in expected_pairs:
        eq_(type_obj.python_type, py_type)
    assert_raises(
        NotImplementedError, lambda: types.TypeEngine().python_type
    )
Example #19
Source File: test_types.py From sqlalchemy with MIT License | 5 votes |
def test_grouped_bind_adapt(self):
    """Bind params wrapped in Grouping (even nested twice) adapt to the
    Date affinity of the compared timestamp column."""
    single = test_table.c.atimestamp == elements.Grouping(bindparam("thedate"))
    eq_(single.right.type._type_affinity, Date)
    eq_(single.right.element.type._type_affinity, Date)

    nested = test_table.c.atimestamp == elements.Grouping(
        elements.Grouping(bindparam("thedate"))
    )
    eq_(nested.right.type._type_affinity, Date)
    eq_(nested.right.element.type._type_affinity, Date)
    eq_(nested.right.element.element.type._type_affinity, Date)
Example #20
Source File: 2019_06_11_9e39ba5d94c4_add_current_date.py From ultimate-poll-bot with MIT License | 5 votes |
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Existing rows are backfilled via the server-side now() default.
    current_date_column = sa.Column(
        "current_date", sa.Date(), server_default=sa.text("now()"), nullable=False
    )
    op.add_column("poll", current_date_column)
    # ### end Alembic commands ###
Example #21
Source File: 2019_06_29_b0789eb1e8a4_due_date_on_poll.py From ultimate-poll-bot with MIT License | 5 votes |
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Optional due date on polls.
    due_date_column = sa.Column("due_date", sa.Date(), nullable=True)
    op.add_column("poll", due_date_column)
    # ### end Alembic commands ###
Example #22
Source File: test_query.py From sqlalchemy with MIT License | 5 votes |
def define_tables(cls, metadata):
    """Define table `t` with one column per temporal type, including the
    PostgreSQL-specific INTERVAL and a timezone-aware DateTime."""
    Table(
        "t",
        metadata,
        Column("id", Integer, primary_key=True),
        Column("dtme", DateTime),
        Column("dt", Date),
        Column("tm", Time),
        Column("intv", postgresql.INTERVAL),
        Column("dttz", DateTime(timezone=True)),
    )
Example #23
Source File: d05e8a773f11_add_testcase_rollup.py From zeus with Apache License 2.0 | 5 votes |
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Daily per-repository rollup of test-case runs, unique per
    # (repository, testcase hash, date).
    op.create_table(
        "testcase_rollup",
        sa.Column("id", zeus.db.types.guid.GUID(), nullable=False),
        sa.Column("hash", sa.String(length=40), nullable=False),
        sa.Column("name", sa.Text(), nullable=False),
        sa.Column("date", sa.Date(), nullable=False),
        sa.Column("total_runs", sa.Integer(), nullable=False),
        sa.Column("total_duration", sa.Integer(), nullable=False),
        sa.Column("runs_failed", sa.Integer(), nullable=False),
        sa.Column("runs_passed", sa.Integer(), nullable=False),
        sa.Column("repository_id", zeus.db.types.guid.GUID(), nullable=False),
        sa.ForeignKeyConstraint(
            ["repository_id"], ["repository.id"], ondelete="CASCADE"
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint(
            "repository_id", "hash", "date", name="unq_testcase_rollup_hash"
        ),
    )
    op.create_index(
        op.f("ix_testcase_rollup_repository_id"),
        "testcase_rollup",
        ["repository_id"],
        unique=False,
    )
    # ### end Alembic commands ###
Example #24
Source File: load_redten_ipython_env.py From sci-pype with Apache License 2.0 | 5 votes |
def convert_date_string_to_date(date_str, optional_format="%Y-%m-%dT%H:%M:%S.%fZ"):
    """Parse `date_str` using `optional_format` into a datetime.

    Returns the parsed `datetime.datetime`, or None when parsing fails.

    Fixes over the original snippet:
    - `except Exception,f` is Python-2-only syntax (SyntaxError on py3);
      replaced with `except Exception as err`.
    - The handler called `self.lg(...)` inside a free function, which raised
      NameError whenever parsing failed; log to stderr instead.
    - The parsed value is now explicitly returned (the snippet never
      returned it; presumably truncated — confirm against the full source).
    """
    date_to_return = None
    try:
        date_to_return = datetime.datetime.strptime(str(date_str), optional_format)
    except Exception as err:
        sys.stderr.write(
            "ERROR: Failed Converting Date(" + str(date_str)
            + ") with Format(" + str(optional_format) + "): " + str(err) + "\n"
        )
    # end of tries to read this string as a valid date...
    return date_to_return
Example #25
Source File: fields.py From ormantic with BSD 3-Clause "New" or "Revised" License | 5 votes |
def Date(
    *,
    primary_key: bool = False,
    allow_null: bool = False,
    index: bool = False,
    unique: bool = False,
) -> Type[date]:
    """Build a `date`-based ORM field type backed by a sqlalchemy.Date column.

    All options are keyword-only column flags forwarded to the field's
    namespace; the returned class mixes `datetime.date` with ColumnFactory.
    """
    attrs = dict(
        primary_key=primary_key,
        allow_null=allow_null,
        index=index,
        unique=unique,
        column_type=sqlalchemy.Date(),
    )
    return type("Date", (date, ColumnFactory), attrs)
Example #26
Source File: future_info.py From data_integration_celery with GNU General Public License v3.0 | 5 votes |
def import_jq_future_info(chain_param=None, refresh=False):
    """Import the full set of futures codes and names into the info table."""
    # The underlying symbol is the leading letter run of codes like 'AU2012.XSGE'.
    pattern = re.compile(r"[A-Za-z]+(?=\d{3,4}\.[A-Za-z]{4}$)")
    df = import_info_table(TYPE_NAME, insert_db=False)
    df['underlying_symbol'] = df['jq_code'].apply(lambda x: pattern.search(x).group())
    logger.info("更新 %s 开始", TABLE_NAME)
    # has_table = engine_md.has_table(table_name)
    param_list = [
        ('jq_code', String(20)),
        ('underlying_symbol', String(6)),
        ('display_name', String(20)),
        ('name', String(20)),
        ('start_date', Date),
        ('end_date', Date),
    ]
    # Column name -> SQL type mapping for the bulk insert.
    dtype = dict(param_list)
    data_count = bunch_insert_p(df, table_name=TABLE_NAME, dtype=dtype, primary_keys=['jq_code'])
    logging.info("更新 %s 完成 存量数据 %d 条", TABLE_NAME, data_count)
Example #27
Source File: helpers.py From planespotter with MIT License | 5 votes |
def is_date_field(model, fieldname):
    """Return True iff the named field of `model` is a Date or DateTime
    column type."""
    fieldtype = get_field_type(model, fieldname)
    return isinstance(fieldtype, (Date, DateTime))
Example #28
Source File: 5e04dbf30fb0_first_commit_for_prod.py From everyclass-server with Mozilla Public License 2.0 | 5 votes |
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Reports of rooms that were marked available but were not; a report is
    # unique per (room, date, time, reporter, reporter_type).
    op.create_table(
        'unavailable_room_report',
        sa.Column('record_id', sa.Integer(), nullable=False),
        sa.Column('room_id', sa.String(), nullable=True),
        sa.Column('date', sa.Date(), nullable=True),
        sa.Column('time', sa.String(length=4), nullable=True),
        sa.Column('reporter', sa.String(length=15), nullable=True),
        sa.Column('reporter_type', sa.String(), nullable=True),
        sa.PrimaryKeyConstraint('record_id'),
        sa.UniqueConstraint(
            'room_id', 'date', 'time', 'reporter', 'reporter_type',
            name='unavailable_room_report_uniq'
        ),
    )
    op.drop_index('idx_token', table_name='calendar_tokens')
    op.alter_column(
        'identity_verify_requests', 'create_time',
        existing_type=postgresql.TIMESTAMP(timezone=True),
        nullable=True,
    )
    op.create_index(op.f('ix_simple_passwords_time'), 'simple_passwords', ['time'], unique=False)
    # Rebuild idx_host_time over the (host_id, last_visit_time) pair.
    op.drop_index('idx_host_time', table_name='visit_tracks')
    op.create_index('idx_host_time', 'visit_tracks', ['host_id', 'last_visit_time'], unique=False)
    # ### end Alembic commands ###
Example #29
Source File: plot.py From sticker-finder with MIT License | 5 votes |
def get_inline_query_performance_statistics(session):
    """Plot the average inline-query duration per day as a bar chart and
    return the rendered image."""
    creation_date = func.cast(InlineQueryRequest.created_at, Date).label(
        "creation_date"
    )
    # Average request duration grouped by day.
    per_day = (
        session.query(
            creation_date, func.avg(InlineQueryRequest.duration).label("count")
        )
        .group_by(creation_date)
        .order_by(creation_date)
        .all()
    )
    rows = [("strict", day, duration) for day, duration in per_day]

    # Single dataframe with named columns for plotting.
    dataframe = pandas.DataFrame(rows, columns=["type", "date", "duration"])

    month_locator = mdates.MonthLocator()  # tick every month
    month_formatter = mdates.DateFormatter("%Y-%m")

    fig, ax = plt.subplots(figsize=(30, 15), dpi=120)
    for label, group in dataframe.groupby(["type"]):
        ax = group.plot(ax=ax, kind="bar", x="date", y="duration", label=label)
    ax.xaxis.set_major_locator(month_locator)
    ax.xaxis.set_major_formatter(month_formatter)

    image = image_from_figure(fig)
    image.name = "request_duration_statistics.png"
    return image
Example #30
Source File: plot.py From sticker-finder with MIT License | 5 votes |
def get_inline_queries_statistics(session):
    """Plot all / successful / unsuccessful inline queries per day as line
    charts and return the rendered image."""
    day = cast(InlineQuery.created_at, Date)

    # All queries per day.
    rows = [
        ("all", d, c)
        for d, c in session.query(day, func.count(InlineQuery.id))
        .group_by(day)
        .all()
    ]

    # Queries that resolved to a sticker.
    rows += [
        ("successful", d, c)
        for d, c in session.query(day, func.count(InlineQuery.id))
        .filter(InlineQuery.sticker_file_id.isnot(None))
        .group_by(day)
        .all()
    ]

    # Queries that did not resolve to a sticker.
    rows += [
        ("unsuccessful", d, c)
        for d, c in session.query(day, func.count(InlineQuery.id))
        .filter(InlineQuery.sticker_file_id.is_(None))
        .group_by(day)
        .all()
    ]

    dataframe = pandas.DataFrame(rows, columns=["type", "date", "queries"])

    fig, ax = plt.subplots(figsize=(30, 15), dpi=120)
    for label, group in dataframe.groupby(["type"]):
        ax = group.plot(ax=ax, kind="line", x="date", y="queries", label=label)

    image = image_from_figure(fig)
    image.name = "inline_usage.png"
    return image