Python django.db.connections Examples

The following are 30 code examples of django.db.connections. You can go to the original project or source file by following the links above each example. You may also want to check out all available functions and classes of the module django.db.
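For context, django.db.connections is a ConnectionHandler: a mapping from each alias configured in settings.DATABASES to a database connection, and iterating over it yields the aliases. The sketch below shows the access pattern the examples on this page rely on; the helper name count_rows and the table_name argument are illustrative, not part of Django's API.

from django.db import DEFAULT_DB_ALIAS, connections

def count_rows(table_name, using=DEFAULT_DB_ALIAS):
    """Illustrative helper: count rows in a table on the given database alias."""
    connection = connections[using]                   # look up the connection by alias
    table = connection.ops.quote_name(table_name)     # backend-aware identifier quoting
    with connection.cursor() as cursor:               # cursors work as context managers
        cursor.execute("SELECT COUNT(*) FROM %s" % table)
        return cursor.fetchone()[0]

# Iterating over `connections` yields the configured aliases:
# for alias in connections:
#     print(alias, connections[alias].vendor)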
Example #1
Source File: db.py    From GTDWeb with GNU General Public License v2.0
def has_key(self, key, version=None):
        key = self.make_key(key, version=version)
        self.validate_key(key)

        db = router.db_for_read(self.cache_model_class)
        table = connections[db].ops.quote_name(self._table)

        if settings.USE_TZ:
            now = datetime.utcnow()
        else:
            now = datetime.now()
        now = now.replace(microsecond=0)

        with connections[db].cursor() as cursor:
            cursor.execute("SELECT cache_key FROM %s "
                           "WHERE cache_key = %%s and expires > %%s" % table,
                           [key, connections[db].ops.value_to_db_datetime(now)])
            return cursor.fetchone() is not None 
Example #2
Source File: query.py    From GTDWeb with GNU General Public License v2.0
def scale(self, x, y, z=0.0, **kwargs):
        """
        Scales the geometry to a new size by multiplying the ordinates
        with the given x,y,z scale factors.
        """
        if connections[self.db].ops.spatialite:
            if z != 0.0:
                raise NotImplementedError('SpatiaLite does not support 3D scaling.')
            s = {'procedure_fmt': '%(geo_col)s,%(x)s,%(y)s',
                 'procedure_args': {'x': x, 'y': y},
                 'select_field': GeomField(),
                 }
        else:
            s = {'procedure_fmt': '%(geo_col)s,%(x)s,%(y)s,%(z)s',
                 'procedure_args': {'x': x, 'y': y, 'z': z},
                 'select_field': GeomField(),
                 }
        return self._spatial_attribute('scale', s, **kwargs) 
Example #3
Source File: views.py    From django-prometheus with Apache License 2.0
def sql(request):
    databases = connections.databases.keys()
    query = request.GET.get("query")
    db = request.GET.get("database")
    if query and db:
        cursor = connections[db].cursor()
        cursor.execute(query, [])
        results = cursor.fetchall()
        return TemplateResponse(
            request,
            "sql.html",
            {"query": query, "rows": results, "databases": databases},
        )
    else:
        return TemplateResponse(
            request, "sql.html", {"query": None, "rows": None, "databases": databases}
        ) 
Example #4
Source File: query.py    From GTDWeb with GNU General Public License v2.0
def translate(self, x, y, z=0.0, **kwargs):
        """
        Translates the geometry to a new location using the given numeric
        parameters as offsets.
        """
        if connections[self.db].ops.spatialite:
            if z != 0.0:
                raise NotImplementedError('SpatiaLite does not support 3D translation.')
            s = {'procedure_fmt': '%(geo_col)s,%(x)s,%(y)s',
                 'procedure_args': {'x': x, 'y': y},
                 'select_field': GeomField(),
                 }
        else:
            s = {'procedure_fmt': '%(geo_col)s,%(x)s,%(y)s,%(z)s',
                 'procedure_args': {'x': x, 'y': y, 'z': z},
                 'select_field': GeomField(),
                 }
        return self._spatial_attribute('translate', s, **kwargs) 
Example #5
Source File: benchmark.py    From django-cachalot with BSD 3-Clause "New" or "Revised" License
def run(self):
        for db_alias in settings.DATABASES:
            self.db_alias = db_alias
            self.db_vendor = connections[self.db_alias].vendor
            print('Benchmarking %s…' % self.db_vendor)
            for cache_alias in settings.CACHES:
                cache = caches[cache_alias]
                self.cache_name = cache.__class__.__name__[:-5].lower()
                with override_settings(CACHALOT_CACHE=cache_alias):
                    self.execute_benchmark()

        self.df = pd.DataFrame.from_records(self.data)
        if not os.path.exists(RESULTS_PATH):
            os.mkdir(RESULTS_PATH)
        self.df.to_csv(os.path.join(RESULTS_PATH, 'data.csv'))

        self.xlim = (0, self.df['time'].max() * 1.01)
        self.output('db')
        self.output('cache') 
Example #6
Source File: query.py    From GTDWeb with GNU General Public License v2.0
def _geomset_attribute(self, func, geom, tolerance=0.05, **kwargs):
        """
        DRY routine for setting up a GeoQuerySet method that attaches a
        Geometry attribute and takes a Geometry parameter.  This is used
        for geometry set-like operations (e.g., intersection, difference,
        union, sym_difference).
        """
        s = {
            'geom_args': ('geom',),
            'select_field': GeomField(),
            'procedure_fmt': '%(geo_col)s,%(geom)s',
            'procedure_args': {'geom': geom},
        }
        if connections[self.db].ops.oracle:
            s['procedure_fmt'] += ',%(tolerance)s'
            s['procedure_args']['tolerance'] = tolerance
        return self._spatial_attribute(func, s, **kwargs) 
Example #7
Source File: query.py    From GTDWeb with GNU General Public License v2.0
def is_nullable(self, field):
        """
        A helper to check if the given field should be treated as nullable.

        Some backends treat '' as null and Django treats such fields as
        nullable for those backends. In such situations field.null can be
        False even if we should treat the field as nullable.
        """
        # We need to use DEFAULT_DB_ALIAS here, as QuerySet does not have
        # (nor should it have) knowledge of which connection is going to be
        # used. The proper fix would be to defer all decisions where
        # is_nullable() is needed to the compiler stage, but that is not easy
        # to do currently.
        if ((connections[DEFAULT_DB_ALIAS].features.interprets_empty_strings_as_nulls)
                and field.empty_strings_allowed):
            return True
        else:
            return field.null 
Example #8
Source File: sql.py    From django-test-migrations with MIT License
def drop_models_tables(
    database_name: str,
    style: Optional[Style] = None,
) -> None:
    """Drop all installed Django's models tables."""
    style = style or no_style()
    connection = connections[database_name]
    tables = connection.introspection.django_table_names(
        only_existing=True,
        include_views=False,
    )
    sql_drop_tables = [
        connection.SchemaEditorClass.sql_delete_table % {
            'table': style.SQL_FIELD(connection.ops.quote_name(table)),
        }
        for table in tables
    ]
    if sql_drop_tables:
        get_execute_sql_flush_for(connection)(database_name, sql_drop_tables) 
Example #9
Source File: test_db.py    From django-prometheus with Apache License 2.0
def testHistograms(self):
        cursor_db1 = connections["test_db_1"].cursor()
        cursor_db2 = connections["test_db_2"].cursor()
        cursor_db1.execute("SELECT 1")
        for _ in range(200):
            cursor_db2.execute("SELECT 2")
        assert (
            self.getMetric(
                "django_db_query_duration_seconds_count",
                alias="test_db_1",
                vendor="sqlite",
            )
            > 0
        )
        assert (
            self.getMetric(
                "django_db_query_duration_seconds_count",
                alias="test_db_2",
                vendor="sqlite",
            )
            >= 200
        ) 
Example #10
Source File: sources.py    From koku with GNU Affero General Public License v3.0
def check_migrations(self):
        """
        Check the status of database migrations.

        The koku API server is responsible for running all database migrations.  This method
        will return the state of the database and whether or not all migrations have been completed.

        Hat tip to the Stack Overflow contributor: https://stackoverflow.com/a/31847406

        Returns:
            Boolean - True if database is available and migrations have completed.  False otherwise.

        """
        connection = connections[DEFAULT_DB_ALIAS]
        connection.prepare_database()
        executor = MigrationExecutor(connection)
        targets = executor.loader.graph.leaf_nodes()
        return not executor.migration_plan(targets) 
Example #11
Source File: test_basic.py    From sentry-python with BSD 2-Clause "Simplified" License
def test_sql_psycopg2_string_composition(sentry_init, capture_events, query):
    sentry_init(
        integrations=[DjangoIntegration()],
        send_default_pii=True,
        _experiments={"record_sql_params": True},
    )
    from django.db import connections

    if "postgres" not in connections:
        pytest.skip("postgres tests disabled")

    import psycopg2.sql

    sql = connections["postgres"].cursor()

    events = capture_events()
    with pytest.raises(ProgrammingError):
        sql.execute(query(psycopg2.sql), {"my_param": 10})

    capture_message("HI")

    (event,) = events
    crumb = event["breadcrumbs"][-1]
    assert crumb["message"] == ('SELECT %(my_param)s FROM "foobar"')
    assert crumb["data"]["db.params"] == {"my_param": 10} 
Example #12
Source File: query.py    From bioforum with MIT License
def _execute_query(self):
        connection = connections[self.using]

        # Adapt parameters to the database, as much as possible considering
        # that the target type isn't known. See #17755.
        params_type = self.params_type
        adapter = connection.ops.adapt_unknown_value
        if params_type is tuple:
            params = tuple(adapter(val) for val in self.params)
        elif params_type is dict:
            params = {key: adapter(val) for key, val in self.params.items()}
        else:
            raise RuntimeError("Unexpected params type: %s" % params_type)

        self.cursor = connection.cursor()
        self.cursor.execute(self.sql, params) 
Example #13
Source File: query.py    From bioforum with MIT License
def is_nullable(self, field):
        """
        Check if the given field should be treated as nullable.

        Some backends treat '' as null and Django treats such fields as
        nullable for those backends. In such situations field.null can be
        False even if we should treat the field as nullable.
        """
        # We need to use DEFAULT_DB_ALIAS here, as QuerySet does not have
        # (nor should it have) knowledge of which connection is going to be
        # used. The proper fix would be to defer all decisions where
        # is_nullable() is needed to the compiler stage, but that is not easy
        # to do currently.
        if connections[DEFAULT_DB_ALIAS].features.interprets_empty_strings_as_nulls and field.empty_strings_allowed:
            return True
        else:
            return field.null 
Example #14
Source File: database.py    From koku with GNU Affero General Public License v3.0
def check_migrations():
    """
    Check the status of database migrations.
    The koku API server is responsible for running all database migrations.  This method
    will return the state of the database and whether or not all migrations have been completed.
    Hat tip to the Stack Overflow contributor: https://stackoverflow.com/a/31847406
    Returns:
        Boolean - True if database is available and migrations have completed.  False otherwise.
    """
    try:
        connection = connections[DEFAULT_DB_ALIAS]
        connection.prepare_database()
        executor = MigrationExecutor(connection)
        targets = executor.loader.graph.leaf_nodes()
        return not executor.migration_plan(targets)
    except OperationalError:
        return False 
Example #15
Source File: db.py    From GTDWeb with GNU General Public License v2.0
def _cull(self, db, cursor, now):
        if self._cull_frequency == 0:
            self.clear()
        else:
            # When USE_TZ is True, 'now' will be an aware datetime in UTC.
            now = now.replace(tzinfo=None)
            table = connections[db].ops.quote_name(self._table)
            cursor.execute("DELETE FROM %s WHERE expires < %%s" % table,
                           [connections[db].ops.value_to_db_datetime(now)])
            cursor.execute("SELECT COUNT(*) FROM %s" % table)
            num = cursor.fetchone()[0]
            if num > self._max_entries:
                cull_num = num // self._cull_frequency
                cursor.execute(
                    connections[db].ops.cache_key_culling_sql() % table,
                    [cull_num])
                cursor.execute("DELETE FROM %s "
                               "WHERE cache_key < %%s" % table,
                               [cursor.fetchone()[0]]) 
Example #16
Source File: runner.py    From bioforum with MIT License
def _init_worker(counter):
    """
    Switch to databases dedicated to this worker.

    This helper lives at module-level because of the multiprocessing module's
    requirements.
    """

    global _worker_id

    with counter.get_lock():
        counter.value += 1
        _worker_id = counter.value

    for alias in connections:
        connection = connections[alias]
        settings_dict = connection.creation.get_test_db_clone_settings(str(_worker_id))
        # connection.settings_dict must be updated in place for changes to be
        # reflected in django.db.connections. If the following line assigned
        # connection.settings_dict = settings_dict, new threads would connect
        # to the default database instead of the appropriate clone.
        connection.settings_dict.update(settings_dict)
        connection.close() 
Example #17
Source File: db.py    From GTDWeb with GNU General Public License v2.0
def clear(self):
        db = router.db_for_write(self.cache_model_class)
        table = connections[db].ops.quote_name(self._table)
        with connections[db].cursor() as cursor:
            cursor.execute('DELETE FROM %s' % table) 
Example #18
Source File: signals.py    From bioforum with MIT License
def get_hstore_oids(connection_alias):
    """Return hstore and hstore array OIDs."""
    with connections[connection_alias].cursor() as cursor:
        cursor.execute(
            "SELECT t.oid, typarray "
            "FROM pg_type t "
            "JOIN pg_namespace ns ON typnamespace = ns.oid "
            "WHERE typname = 'hstore'"
        )
        oids = []
        array_oids = []
        for row in cursor:
            oids.append(row[0])
            array_oids.append(row[1])
        return tuple(oids), tuple(array_oids) 
Example #19
Source File: __init__.py    From bioforum with MIT License
def _check_backend_specific_checks(self, **kwargs):
        app_label = self.model._meta.app_label
        for db in connections:
            if router.allow_migrate(db, app_label, model_name=self.model._meta.model_name):
                return connections[db].validation.check_field(self, **kwargs)
        return [] 
Example #20
Source File: query.py    From bioforum with MIT License
def __iter__(self):
        # Always execute a new query for a new iterator.
        # This could be optimized with a cache at the expense of RAM.
        self._execute_query()
        if not connections[self.using].features.can_use_chunked_reads:
            # If the database can't use chunked reads we need to make sure we
            # evaluate the entire query up front.
            result = list(self.cursor)
        else:
            result = self.cursor
        return iter(result) 
Example #21
Source File: query.py    From bioforum with MIT License
def get_columns(self):
        if self.cursor is None:
            self._execute_query()
        converter = connections[self.using].introspection.column_name_converter
        return [converter(column_meta[0])
                for column_meta in self.cursor.description] 
Example #22
Source File: subqueries.py    From bioforum with MIT License
def delete_qs(self, query, using):
        """
        Delete the queryset in one SQL query (if possible). For simple queries
        this is done by copying the query.query.where to self.query, for
        complex queries by using subquery.
        """
        innerq = query.query
        # Make sure the inner query has at least one table in use.
        innerq.get_initial_alias()
        # The same for our new query.
        self.get_initial_alias()
        innerq_used_tables = tuple([t for t in innerq.alias_map if innerq.alias_refcount[t]])
        if not innerq_used_tables or innerq_used_tables == tuple(self.alias_map):
            # There is only the base table in use in the query.
            self.where = innerq.where
        else:
            pk = query.model._meta.pk
            if not connections[using].features.update_can_self_select:
                # We can't do the delete using subquery.
                values = list(query.values_list('pk', flat=True))
                if not values:
                    return 0
                return self.delete_batch(values, using)
            else:
                innerq.clear_select_clause()
                innerq.select = [
                    pk.get_col(self.get_initial_alias())
                ]
                values = innerq
            self.where = self.where_class()
            self.add_q(Q(pk__in=values))
        cursor = self.get_compiler(using).execute_sql(CURSOR)
        return cursor.rowcount if cursor else 0 
Example #23
Source File: deletion.py    From bioforum with MIT License
def CASCADE(collector, field, sub_objs, using):
    collector.collect(sub_objs, source=field.remote_field.model,
                      source_attr=field.name, nullable=field.null)
    if field.null and not connections[using].features.can_defer_constraint_checks:
        collector.add_field_update(field, None, sub_objs) 
Example #24
Source File: utils.py    From django-cachalot with BSD 3-Clause "New" or "Revised" License
def _get_tables(db_alias, query):
    if query.select_for_update or (
            not cachalot_settings.CACHALOT_CACHE_RANDOM
            and '?' in query.order_by):
        raise UncachableQuery

    try:
        if query.extra_select:
            raise IsRawQuery
        # Gets all tables already found by the ORM.
        tables = set(query.table_map)
        tables.add(query.get_meta().db_table)
        # Gets tables in subquery annotations.
        for annotation in query.annotations.values():
            if isinstance(annotation, Subquery):
                # Django 2.2+ removed queryset in favor of simply using query
                try:
                    tables.update(_get_tables(db_alias, annotation.queryset.query))
                except AttributeError:
                    tables.update(_get_tables(db_alias, annotation.query))
        # Gets tables in WHERE subqueries.
        for subquery in _find_subqueries_in_where(query.where.children):
            tables.update(_get_tables(db_alias, subquery))
        # Gets tables in HAVING subqueries.
        if isinstance(query, AggregateQuery):
            tables.update(
                _get_tables_from_sql(connections[db_alias], query.subquery))
        # Gets tables in combined queries
        # using `.union`, `.intersection`, or `.difference`.
        if query.combined_queries:
            for combined_query in query.combined_queries:
                tables.update(_get_tables(db_alias, combined_query))
    except IsRawQuery:
        sql = query.get_compiler(db_alias).as_sql()[0].lower()
        tables = _get_tables_from_sql(connections[db_alias], sql)

    if not are_all_cachable(tables):
        raise UncachableQuery
    return tables 
Example #25
Source File: test_db.py    From django-prometheus with Apache License 2.0
def testCounters(self):
        r = self.saveRegistry()
        cursor = connections["postgis"].cursor()

        for _ in range(20):
            cursor.execute("SELECT 1")

        self.assertMetricCompare(
            r,
            lambda a, b: a + 20 <= b < a + 25,
            "django_db_execute_total",
            alias="postgis",
            vendor="postgresql",
        ) 
Example #26
Source File: test_db.py    From django-prometheus with Apache License 2.0
def testCounters(self):
        registry = self.saveRegistry()
        cursor = connections["postgresql"].cursor()

        for _ in range(20):
            cursor.execute("SELECT 1")

        self.assertMetricCompare(
            registry,
            lambda a, b: a + 20 <= b < a + 25,
            "django_db_execute_total",
            alias="postgresql",
            vendor="postgresql",
        ) 
Example #27
Source File: test_basic.py    From sentry-python with BSD 2-Clause "Simplified" License
def test_sql_dict_query_params(sentry_init, capture_events):
    sentry_init(
        integrations=[DjangoIntegration()],
        send_default_pii=True,
        _experiments={"record_sql_params": True},
    )

    from django.db import connections

    if "postgres" not in connections:
        pytest.skip("postgres tests disabled")

    sql = connections["postgres"].cursor()

    events = capture_events()
    with pytest.raises(ProgrammingError):
        sql.execute(
            """SELECT count(*) FROM people_person WHERE foo = %(my_foo)s""",
            {"my_foo": 10},
        )

    capture_message("HI")
    (event,) = events

    crumb = event["breadcrumbs"][-1]
    assert crumb["message"] == (
        "SELECT count(*) FROM people_person WHERE foo = %(my_foo)s"
    )
    assert crumb["data"]["db.params"] == {"my_foo": 10} 
Example #28
Source File: test_db.py    From django-prometheus with Apache License 2.0
def testExecuteMany(self):
        registry = self.saveRegistry()
        cursor_db1 = connections["test_db_1"].cursor()
        cursor_db1.executemany(
            "INSERT INTO testapp_lawn(location) VALUES (?)",
            [("Paris",), ("New York",), ("Berlin",), ("San Francisco",)],
        )
        self.assertMetricDiff(
            registry,
            4,
            "django_db_execute_many_total",
            alias="test_db_1",
            vendor="sqlite",
        ) 
Example #29
Source File: test_db.py    From django-prometheus with Apache License 2.0
def testCounters(self):
        cursor_db1 = connections["test_db_1"].cursor()
        cursor_db2 = connections["test_db_2"].cursor()
        cursor_db1.execute("SELECT 1")
        for _ in range(200):
            cursor_db2.execute("SELECT 2")
        cursor_db1.execute("SELECT 3")
        try:
            cursor_db1.execute("this is clearly not valid SQL")
        except Exception:
            pass

        self.assertMetricEquals(
            1,
            "django_db_errors_total",
            alias="test_db_1",
            vendor="sqlite",
            type="OperationalError",
        )
        assert (
            self.getMetric(
                "django_db_execute_total", alias="test_db_1", vendor="sqlite"
            )
            > 0
        )
        assert (
            self.getMetric(
                "django_db_execute_total", alias="test_db_2", vendor="sqlite"
            )
            >= 200
        ) 
Example #30
Source File: test_db.py    From django-prometheus with Apache License 2.0
def testConfigHasExpectedDatabases(self):
        """Not a real unit test: ensures that testapp.settings contains the
        databases this test expects."""
        assert "default" in connections.databases.keys()
        assert "test_db_1" in connections.databases.keys()
        assert "test_db_2" in connections.databases.keys()