Python django.conf.settings.DATABASES Examples
The following are 30 code examples of django.conf.settings.DATABASES, the setting that maps database aliases (such as 'default') to their connection parameters. Each example is taken from an open-source project; the source file, project, and license are noted above it. You may also want to look at the other settings available on the django.conf.settings module.
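For orientation before the examples, here is a minimal sketch of what such a setting typically looks like and how application code reads it back. This is standard Django usage; the alias, engine, and credentials shown are placeholders for illustration, not values taken from any of the projects below.

# settings.py -- a minimal, hypothetical DATABASES configuration.
# The keys are standard Django; the values are placeholders.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql',
        'NAME': 'myproject',
        'USER': 'myuser',
        'PASSWORD': 'secret',
        'HOST': 'localhost',
        'PORT': '5432',
    },
}

# Elsewhere, code reads the same structure back through django.conf.settings:
from django.conf import settings

db_name = settings.DATABASES['default']['NAME']
for alias in settings.DATABASES:  # iterate over the configured aliases
    engine = settings.DATABASES[alias]['ENGINE']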
Example #1
Source File: creation.py From bioforum with MIT License
def _switch_to_test_user(self, parameters):
    """
    Switch to the user that's used for creating the test database.

    Oracle doesn't have the concept of separate databases under the same
    user, so a separate user is used; see _create_test_db(). The main user
    is also needed for cleanup when testing is completed, so save its
    credentials in the SAVED_USER/SAVED_PASSWORD key in the settings dict.
    """
    real_settings = settings.DATABASES[self.connection.alias]
    real_settings['SAVED_USER'] = self.connection.settings_dict['SAVED_USER'] = \
        self.connection.settings_dict['USER']
    real_settings['SAVED_PASSWORD'] = self.connection.settings_dict['SAVED_PASSWORD'] = \
        self.connection.settings_dict['PASSWORD']
    real_test_settings = real_settings['TEST']
    test_settings = self.connection.settings_dict['TEST']
    real_test_settings['USER'] = real_settings['USER'] = test_settings['USER'] = \
        self.connection.settings_dict['USER'] = parameters['user']
    real_settings['PASSWORD'] = self.connection.settings_dict['PASSWORD'] = parameters['password']
Example #2
Source File: panels.py From django-cachalot with BSD 3-Clause "New" or "Revised" License
def collect_invalidations(self):
    models = apps.get_models()
    data = defaultdict(list)
    cache = cachalot_caches.get_cache()
    for db_alias in settings.DATABASES:
        get_table_cache_key = cachalot_settings.CACHALOT_TABLE_KEYGEN
        model_cache_keys = {
            get_table_cache_key(db_alias, model._meta.db_table): model
            for model in models}
        for cache_key, timestamp in cache.get_many(
                model_cache_keys.keys()).items():
            invalidation = datetime.fromtimestamp(timestamp)
            model = model_cache_keys[cache_key]
            data[db_alias].append(
                (model._meta.app_label, model.__name__, invalidation))
            if self.last_invalidation is None \
                    or invalidation > self.last_invalidation:
                self.last_invalidation = invalidation
        data[db_alias].sort(key=lambda row: row[2], reverse=True)
    self.record_stats({'invalidations_per_db': data.items()})
Example #3
Source File: test_runner.py From zulip with Apache License 2.0
def setup_test_environment(self, *args: Any, **kwargs: Any) -> Any:
    settings.DATABASES['default']['NAME'] = settings.BACKEND_DATABASE_TEMPLATE
    # We create/destroy the test databases in run_tests to avoid
    # duplicate work when running in parallel mode.

    # Write the template database ids to a file that we can
    # reference for cleaning them up if they leak.
    filepath = os.path.join(get_dev_uuid_var_path(), TEMPLATE_DATABASE_DIR,
                            get_database_id())
    os.makedirs(os.path.dirname(filepath), exist_ok=True)
    with open(filepath, "w") as f:
        if self.parallel > 1:
            for index in range(self.parallel):
                f.write(get_database_id(index + 1) + "\n")
        else:
            f.write(get_database_id() + "\n")

    # Check if we are in serial mode to avoid unnecessarily making a directory.
    # We add "worker_0" in the path for consistency with parallel mode.
    if self.parallel == 1:
        initialize_worker_path(0)

    return super().setup_test_environment(*args, **kwargs)
Example #4
Source File: benchmark.py From django-cachalot with BSD 3-Clause "New" or "Revised" License
def run(self):
    for db_alias in settings.DATABASES:
        self.db_alias = db_alias
        self.db_vendor = connections[self.db_alias].vendor
        print('Benchmarking %s…' % self.db_vendor)
        for cache_alias in settings.CACHES:
            cache = caches[cache_alias]
            self.cache_name = cache.__class__.__name__[:-5].lower()
            with override_settings(CACHALOT_CACHE=cache_alias):
                self.execute_benchmark()

    self.df = pd.DataFrame.from_records(self.data)
    if not os.path.exists(RESULTS_PATH):
        os.mkdir(RESULTS_PATH)
    self.df.to_csv(os.path.join(RESULTS_PATH, 'data.csv'))

    self.xlim = (0, self.df['time'].max() * 1.01)
    self.output('db')
    self.output('cache')
Example #5
Source File: drop_pg_schema.py From zappa-django-utils with MIT License
def handle(self, *args, **options):
    self.stdout.write(self.style.SUCCESS('Starting schema deletion...'))

    dbname = settings.DATABASES['default']['NAME']
    user = settings.DATABASES['default']['USER']
    password = settings.DATABASES['default']['PASSWORD']
    host = settings.DATABASES['default']['HOST']

    con = connect(dbname=dbname, user=user, host=host, password=password)

    self.stdout.write(self.style.SUCCESS('Removing schema {schema} from database {dbname}'
                                         .format(schema=settings.SCHEMA, dbname=dbname)))
    con.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
    cur = con.cursor()
    cur.execute('DROP SCHEMA {schema} CASCADE;'.format(schema=settings.SCHEMA))
    cur.close()
    con.close()

    self.stdout.write(self.style.SUCCESS('All done.'))
Example #6
Source File: create_mysql_db.py From zappa-django-utils with MIT License
def handle(self, *args, **options):
    self.stdout.write(self.style.SUCCESS('Starting db creation'))

    dbname = options.get('db_name') or settings.DATABASES['default']['NAME']
    user = options.get('user') or settings.DATABASES['default']['USER']
    password = options.get('password') or settings.DATABASES['default']['PASSWORD']
    host = settings.DATABASES['default']['HOST']

    con = db.connect(user=user, host=host, password=password)
    cur = con.cursor()
    cur.execute(f'CREATE DATABASE {dbname}')
    cur.execute(f'ALTER DATABASE `{dbname}` CHARACTER SET utf8')
    cur.close()
    con.close()

    self.stdout.write(self.style.SUCCESS('All Done'))
Example #7
Source File: create_pg_db.py From zappa-django-utils with MIT License
def handle(self, *args, **options):
    self.stdout.write(self.style.SUCCESS('Starting DB creation..'))

    dbname = settings.DATABASES['default']['NAME']
    user = settings.DATABASES['default']['USER']
    password = settings.DATABASES['default']['PASSWORD']
    host = settings.DATABASES['default']['HOST']

    self.stdout.write(self.style.SUCCESS('Connecting to host..'))
    con = connect(dbname='postgres', user=user, host=host, password=password)
    con.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)

    self.stdout.write(self.style.SUCCESS('Creating database'))
    cur = con.cursor()
    cur.execute('CREATE DATABASE ' + dbname)
    cur.close()
    con.close()

    self.stdout.write(self.style.SUCCESS('All done!'))
Example #8
Source File: drop_pg_db.py From zappa-django-utils with MIT License
def handle(self, *args, **options):
    self.stdout.write(self.style.SUCCESS('Starting to drop DB..'))

    dbname = settings.DATABASES['default']['NAME']
    user = settings.DATABASES['default']['USER']
    password = settings.DATABASES['default']['PASSWORD']
    host = settings.DATABASES['default']['HOST']

    self.stdout.write(self.style.SUCCESS('Connecting to host..'))
    con = connect(dbname='postgres', user=user, host=host, password=password)
    con.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)

    self.stdout.write(self.style.SUCCESS("Dropping database '{}'".format(dbname)))
    cur = con.cursor()
    cur.execute('DROP DATABASE ' + dbname)
    cur.close()
    con.close()

    self.stdout.write(self.style.SUCCESS('All done!'))
Example #9
Source File: base.py From GTDWeb with GNU General Public License v2.0
def _nodb_connection(self):
    nodb_connection = super(DatabaseWrapper, self)._nodb_connection
    try:
        nodb_connection.ensure_connection()
    except (DatabaseError, WrappedDatabaseError):
        warnings.warn(
            "Normally Django will use a connection to the 'postgres' database "
            "to avoid running initialization queries against the production "
            "database when it's not needed (for example, when running tests). "
            "Django was unable to create a connection to the 'postgres' database "
            "and will use the default database instead.",
            RuntimeWarning
        )
        settings_dict = self.settings_dict.copy()
        settings_dict['NAME'] = settings.DATABASES[DEFAULT_DB_ALIAS]['NAME']
        nodb_connection = self.__class__(
            self.settings_dict.copy(),
            alias=self.alias,
            allow_thread_sharing=False)
    return nodb_connection
Example #10
Source File: creation.py From GTDWeb with GNU General Public License v2.0
def destroy_test_db(self, old_database_name, verbosity=1, keepdb=False):
    """
    Destroy a test database, prompting the user for confirmation if the
    database already exists.
    """
    self.connection.close()
    test_database_name = self.connection.settings_dict['NAME']
    if verbosity >= 1:
        test_db_repr = ''
        action = 'Destroying'
        if verbosity >= 2:
            test_db_repr = " ('%s')" % test_database_name
        if keepdb:
            action = 'Preserving'
        print("%s test database for alias '%s'%s..." % (
            action, self.connection.alias, test_db_repr))

    # if we want to preserve the database
    # skip the actual destroying piece.
    if not keepdb:
        self._destroy_test_db(test_database_name, verbosity)

    # Restore the original database name
    settings.DATABASES[self.connection.alias]["NAME"] = old_database_name
    self.connection.settings_dict["NAME"] = old_database_name
Example #11
Source File: information_schema.py From urbanfootprint with GNU General Public License v3.0
def get_table_description(self, cursor, full_table_name):
    """
    Override the parent method to take schemas into account, sigh
    :param cursor:
    :param full_table_name:
    :return:
    """
    schema, table = parse_schema_and_table(full_table_name)
    # conn = psycopg2.connect(**pg_connection_parameters(settings.DATABASES['default']))
    # conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
    # cursor = conn.cursor()
    cursor.execute("""
        SELECT column_name, is_nullable
        FROM information_schema.columns
        WHERE table_name = %s and table_schema = %s""", [table, schema])
    null_map = dict(cursor.fetchall())
    cursor.execute('SELECT * FROM "{schema}"."{table}" LIMIT 1'.format(schema=schema, table=table))
    return [tuple([item for item in line[:6]] + [null_map[line[0]] == u'YES'])
            for line in cursor.description]
Example #12
Source File: base.py From bioforum with MIT License
def get_connection_params(self):
    settings_dict = self.settings_dict
    # None may be used to connect to the default 'postgres' db
    if settings_dict['NAME'] == '':
        raise ImproperlyConfigured(
            "settings.DATABASES is improperly configured. "
            "Please supply the NAME value.")
    conn_params = {
        'database': settings_dict['NAME'] or 'postgres',
    }
    conn_params.update(settings_dict['OPTIONS'])
    conn_params.pop('isolation_level', None)
    if settings_dict['USER']:
        conn_params['user'] = settings_dict['USER']
    if settings_dict['PASSWORD']:
        conn_params['password'] = settings_dict['PASSWORD']
    if settings_dict['HOST']:
        conn_params['host'] = settings_dict['HOST']
    if settings_dict['PORT']:
        conn_params['port'] = settings_dict['PORT']
    return conn_params
Example #13
Source File: conftest.py From ideascube with GNU Affero General Public License v3.0
def pytest_configure(config):
    from django.conf import settings

    # This is already supposed to be the case by default, and we even tried
    # setting it explicitly anyway.
    #
    # But somehow, at the very beginning of the test suite (when running the
    # migrations or when the post_migrate signal is fired), the transient
    # database is on the filesystem (the value of NAME).
    #
    # We can't figure out why that is, it might be a bug in pytest-django, or
    # worse in django itself.
    #
    # Somehow the default database is always in memory, though.
    settings.DATABASES['transient']['TEST_NAME'] = ':memory:'

    # The documentation says not to use the ManifestStaticFilesStorage for
    # tests, and indeed if we do they fail.
    settings.STATICFILES_STORAGE = (
        'django.contrib.staticfiles.storage.StaticFilesStorage')
Example #14
Source File: base.py From bioforum with MIT License
def _nodb_connection(self):
    nodb_connection = super()._nodb_connection
    try:
        nodb_connection.ensure_connection()
    except (Database.DatabaseError, WrappedDatabaseError):
        warnings.warn(
            "Normally Django will use a connection to the 'postgres' database "
            "to avoid running initialization queries against the production "
            "database when it's not needed (for example, when running tests). "
            "Django was unable to create a connection to the 'postgres' database "
            "and will use the default database instead.",
            RuntimeWarning
        )
        settings_dict = self.settings_dict.copy()
        settings_dict['NAME'] = settings.DATABASES[DEFAULT_DB_ALIAS]['NAME']
        nodb_connection = self.__class__(
            self.settings_dict.copy(),
            alias=self.alias,
            allow_thread_sharing=False)
    return nodb_connection
Example #15
Source File: apps.py From coursys with GNU General Public License v3.0
def sqlite_check(app_configs, **kwargs):
    errors = []
    if 'sqlite' not in settings.DATABASES['default']['ENGINE']:
        # not using sqlite, so don't worry
        return errors

    import sqlite3
    if sqlite3.sqlite_version_info < (3, 12):
        errors.append(
            Warning(
                'SQLite version problem',
                hint='A bug is sqlite version 3.11.x causes a segfault in our tests. Upgrading to >=3.14 is suggested. This is only a warning because many things still work. Just not the tests.',
                id='coredata.E001',
            )
        )
    return errors
Example #16
Source File: models.py From django-twitter-stream with MIT License
def count_approx(cls):
    """
    Get the approximate number of tweets.
    Executes quickly, even on large InnoDB tables.
    """
    if django_settings.DATABASES['default']['ENGINE'].endswith('mysql'):
        query = "SHOW TABLE STATUS WHERE Name = %s"
        cursor = connection.cursor()
        cursor.execute(query, [cls._meta.db_table])

        desc = cursor.description
        row = cursor.fetchone()
        row = dict(zip([col[0].lower() for col in desc], row))

        return int(row['rows'])
    else:
        return cls.objects.count()
Example #17
Source File: panel.py From coursys with GNU General Public License v3.0
def settings_info():
    info = []
    info.append(('Deploy mode', settings.DEPLOY_MODE))
    info.append(('Database engine', settings.DATABASES['default']['ENGINE']))
    info.append(('Authentication Backends', settings.AUTHENTICATION_BACKENDS))
    info.append(('Cache backend', settings.CACHES['default']['BACKEND']))
    info.append(('Haystack engine', settings.HAYSTACK_CONNECTIONS['default']['ENGINE']))
    info.append(('Email backend', settings.EMAIL_BACKEND))
    if hasattr(settings, 'CELERY_EMAIL') and settings.CELERY_EMAIL:
        info.append(('Celery email backend', settings.CELERY_EMAIL_BACKEND))
    if hasattr(settings, 'CELERY_BROKER_URL'):
        info.append(('Celery broker', settings.CELERY_BROKER_URL.split(':')[0]))

    DATABASES = copy.deepcopy(settings.DATABASES)
    for d in DATABASES:
        if 'PASSWORD' in DATABASES[d]:
            DATABASES[d]['PASSWORD'] = '*****'
    info.append(('DATABASES', mark_safe('<pre>' + escape(pprint.pformat(DATABASES)) + '</pre>')))

    return info
Example #18
Source File: information_schema.py From urbanfootprint with GNU General Public License v3.0
def sync_geometry_columns(schema=None, table=None):
    """
    Adds one or more entries to the PostGIS geometry_columns
    :param schema: Optional database schema to which to limit search
    :param table: Optional table name to which to limit search
    :return:
    """
    tables_with_geometry = InformationSchema.objects.tables_with_geometry(schema=schema, table=table)
    for information_scheme in tables_with_geometry:
        conn = psycopg2.connect(**pg_connection_parameters(settings.DATABASES['default']))
        conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
        cursor = conn.cursor()
        sql = "select ST_CoordDim({2}), ST_SRID({2}), ST_GeometryType({2}) from {1}.{0}".format(
            information_scheme.table_name, information_scheme.table_schema, information_scheme.column_name)
        ret = cursor.execute(sql)
        if ret and len(ret) > 0:
            coord, srid, geom_type = ret[0]
        else:
            coord, srid, geom_type = (2, 4326, 'GEOMETRY')
        geometry_record, new_record = GeometryColumns.objects.get_or_create(
            f_table_name=information_scheme.table_name,
            f_geometry_column=information_scheme.column_name,
            f_table_schema=information_scheme.table_schema,
            defaults=dict(
                coord_dimension=coord,
                srid=srid,
                type=geom_type,
            ))
        if not new_record:
            geometry_record.coord_dimension = coord
            geometry_record.srid = srid
            geometry_record.type = geom_type
            geometry_record.save()
Example #19
Source File: scag_dm_init.py From urbanfootprint with GNU General Public License v3.0
def import_database(self):
    dct = settings.DATABASES['source']
    return dict(
        host=dct['HOST'],
        database=dct['NAME'],
        user=dct['USER'],
        password=dct['PASSWORD'],
    )
Example #20
Source File: information_schema.py From urbanfootprint with GNU General Public License v3.0
def describe_table_columns(self, cursor, full_table_name):
    schema, table = parse_schema_and_table(full_table_name)
    # conn = psycopg2.connect(**pg_connection_parameters(settings.DATABASES['default']))
    # conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
    # cursor = conn.cursor()
    cursor.execute("""
        SELECT column_name, is_nullable
        FROM information_schema.columns
        WHERE table_name = %s and table_schema = %s""", [table, schema])
    null_map = dict(cursor.fetchall())
    cursor.execute('SELECT * FROM "{schema}"."{table}" LIMIT 1'.format(schema=schema, table=table))
    return cursor.description
Example #21
Source File: utils.py From urbanfootprint with GNU General Public License v3.0
def drop_db(database_name):
    """
    Drops and recreates the given database, which must be a database that
    present in the default database server
    """
    # Try to connect
    db = pg_connection_parameters(settings.DATABASES['default'])
    conn = psycopg2.connect(**db)
    cur = conn.cursor()
    conn.set_isolation_level(0)
    cur.execute("""DROP DATABASE %s""" % database_name)
Example #22
Source File: tasks.py From lexpredict-contraxsuite with GNU Affero General Public License v3.0
def process(self, **kwargs):
    do_reindex = kwargs.get('reindex')
    do_vacuum = kwargs.get('vacuum')

    if do_reindex:
        with connection.cursor() as cursor:
            cursor.execute('REINDEX DATABASE {};'.format(settings.DATABASES['default']['NAME']))
    if do_vacuum:
        with connection.cursor() as cursor:
            cursor.execute('VACUUM ANALYZE;')
Example #23
Source File: managers.py From urbanfootprint with GNU General Public License v3.0
def create_schema(self, schema, connection=connection):
    if not self.schema_exists(schema):
        logger.info("Creating schema %s" % schema)
        conn = psycopg2.connect(**pg_connection_parameters(settings.DATABASES['default']))
        conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
        cursor = connection.cursor()
        cursor.execute('create schema {0}'.format(schema))
        logger.info("Schema %s created" % schema)
        # This has to be here to create the schema immediately. I don't know why
        if transaction.is_managed():
            transaction.commit()
    else:
        logger.info("Schema %s already exists" % schema)
Example #24
Source File: import.py From zulip with Apache License 2.0
def handle(self, *args: Any, **options: Any) -> None:
    num_processes = int(options['processes'])
    if num_processes < 1:
        raise CommandError('You must have at least one process.')

    subdomain = options['subdomain']

    if options["destroy_rebuild_database"]:
        print("Rebuilding the database!")
        db_name = settings.DATABASES['default']['NAME']
        self.do_destroy_and_rebuild_database(db_name)
    elif options["import_into_nonempty"]:
        print("NOTE: The argument 'import_into_nonempty' is now the default behavior.")

    check_subdomain_available(subdomain, from_management_command=True)

    paths = []
    for path in options['export_paths']:
        path = os.path.realpath(os.path.expanduser(path))
        if not os.path.exists(path):
            raise CommandError(f"Directory not found: '{path}'")
        if not os.path.isdir(path):
            raise CommandError("Export file should be folder; if it's a "
                               "tarball, please unpack it first.")
        paths.append(path)

    for path in paths:
        print(f"Processing dump: {path} ...")
        realm = do_import_realm(path, subdomain, num_processes)
        print("Checking the system bots.")
        do_import_system_bots(realm)
Example #25
Source File: clone.py From django-tenants with MIT License
def _create_clone_schema_function(self):
    """
    Creates a postgres function `clone_schema` that copies a schema and its
    contents. Will replace any existing `clone_schema` functions owned by the
    `postgres` superuser.
    """
    cursor = connection.cursor()

    db_user = settings.DATABASES["default"].get("USER", None) or "postgres"
    cursor.execute(CLONE_SCHEMA_FUNCTION.format(db_user=db_user))
    cursor.close()
Example #26
Source File: s3sqlite_vacuum.py From zappa-django-utils with MIT License
def handle(self, *args, **options):
    if settings.DATABASES['default']['ENGINE'] != "zappa_django_utils.db.backends.s3sqlite":
        raise DatabaseError('This command is only for the s3sqlite Django DB engine.')
    else:
        self.stdout.write(self.style.SUCCESS('Starting database VACUUM...'))
        cursor = connection.cursor()
        cursor.execute('VACUUM;')
        cursor.close()
        self.stdout.write(self.style.SUCCESS('VACUUM complete.'))
Example #27
Source File: clan_view_test.py From rankedftw with GNU Affero General Public License v3.0
def request_server(self, data):
    raw = sc2.direct_ladder_handler_request_clan(settings.DATABASES['default']['NAME'],
                                                 json.dumps(data))
    return json.loads(raw)

# Replace client with test client that calls c++ handler directly.
Example #28
Source File: ranking_stats_test.py From rankedftw with GNU Affero General Public License v3.0
def setUpClass(self):
    super(Test, self).setUpClass()
    self.db = Db()
    self.db_name = settings.DATABASES['default']['NAME']

    # Required objects, not actually used in test cases.
    self.db.create_cache()
    self.db.create_ladder()

    self.t1, self.t2, self.t3 = self.db.create_teams(count=3)
Example #29
Source File: get_teams_ranking_test.py From rankedftw with GNU Affero General Public License v3.0
def setUp(self):
    super().setUp()
    self.db.clear_defaults()
    self.db.delete_all(keep=[Season, Ladder, Cache])
    self.c = sc2.Get(settings.DATABASES['default']['NAME'], Enums.INFO, 0)
Example #30
Source File: ladder_view_test.py From rankedftw with GNU Affero General Public License v3.0
def request_server(self, data):
    raw = sc2.direct_ladder_handler_request_ladder(settings.DATABASES['default']['NAME'],
                                                   json.dumps(data))
    return json.loads(raw)

# Replace client with test client that calls c++ handler directly.