Python django.conf.settings.REDIS_HOST Examples
The following are 11 code examples of django.conf.settings.REDIS_HOST.
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions/classes of the module django.conf.settings, or try the search function.
Example #1
Source File: dashboard.py From DevOps with GNU General Public License v2.0 | 7 votes |
def get(self, request, *args, **kwargs):
    """Return the work-order execution count for each day of the past week.

    Reads the 'WORK' hash from Redis (field = weekday abbreviation,
    value = execution count) and shapes it for the dashboard chart.

    Returns:
        Response with {'title': ..., 'dataset': [...]} and HTTP 200.
    """
    connect = redis.StrictRedis(
        host=settings.REDIS_HOST,
        port=settings.REDIS_PORT,
        db=settings.REDIS_SPACE,
        password=settings.REDIS_PASSWD
    )
    # NOTE(review): 'Won' looks like a typo for 'Mon', but it must match the
    # field names actually stored in the 'WORK' hash -- confirm before changing.
    week_list = ['Won', 'Tue', 'Wed', 'Thur', 'Fri', 'Sat', 'Sun']
    TEMP = connect.hgetall('WORK',)
    WORK = []
    for key in week_list:
        # Bug fix: `key` is already a str, so the original
        # str(key, encoding='utf-8') raised TypeError on every request, and
        # TEMP[key] could never match hgetall's bytes keys. Encode the key for
        # the lookup and default to 0 when a day has no recorded executions.
        WORK.append({
            'time': key,
            '执行次数': TEMP.get(key.encode('utf-8'), 0)
        })
    # Dropped the original trailing "or {}": a dict literal with keys is
    # always truthy, so the fallback was unreachable dead code.
    return Response(
        {'title': '一周内工单执行', 'dataset': WORK},
        status.HTTP_200_OK
    )
Example #2
Source File: dashboard.py From DevOps with GNU General Public License v2.0 | 6 votes |
def get(self, request, *args, **kwargs):
    """Return per-group host counts for the dashboard bar chart.

    Reads the 'GROUP' hash from Redis (field = group name as bytes,
    value = host count) and converts it into a header row plus
    [name, count] rows.

    Returns:
        Response with {'title': ..., 'dataset': [...]} and HTTP 200.
    """
    connect = redis.StrictRedis(
        host=settings.REDIS_HOST,
        port=settings.REDIS_PORT,
        db=settings.REDIS_SPACE,
        password=settings.REDIS_PASSWD
    )
    TEMP = connect.hgetall('GROUP',)
    GROUP = [
        ['主机数目', 'count'],
    ]
    for key in TEMP:
        # hgetall returns bytes for both fields and values; decode the
        # group name and coerce the count to int for the chart library.
        GROUP.append([str(key, encoding='utf-8'), int(TEMP[key])])
    # Dropped the original trailing "or {}": a dict literal with keys is
    # always truthy, so the fallback was unreachable dead code.
    return Response(
        {'title': '主机统计', 'dataset': GROUP},
        status.HTTP_200_OK
    )
Example #3
Source File: cache.py From koku with GNU Affero General Public License v3.0 | 5 votes |
def invalidate_view_cache_for_tenant_and_cache_key(schema_name, cache_key_prefix=None):
    """Invalidate our view cache for a specific tenant and source type.

    If cache_key_prefix is None, all views will be invalidated.
    """
    cache = caches["default"]
    if isinstance(cache, RedisCache):
        # Talk to Redis directly so we can enumerate keys, which the
        # Django cache wrapper does not expose.
        cache = Redis(host=settings.REDIS_HOST, port=settings.REDIS_PORT, db=settings.REDIS_DB)
        all_keys = [key.decode("utf-8") for key in cache.keys("*")]
    elif isinstance(cache, LocMemCache):
        # Local-memory keys look like "<prefix>:<version>:<key>"; keep only
        # the real key portion after the second colon.
        raw_keys = locmem._caches.get(settings.TEST_CACHE_LOCATION).keys()
        all_keys = [key.split(":", 2)[-1] for key in list(raw_keys)]
    else:
        raise KokuCacheError("Using an unsupported caching backend!")
    if all_keys is None:
        all_keys = []
    if cache_key_prefix:
        keys_to_invalidate = [
            key
            for key in all_keys
            if schema_name in key and cache_key_prefix in key
        ]
    else:
        # Invalidate all cached views for the tenant
        keys_to_invalidate = [key for key in all_keys if schema_name in key]
    for key in keys_to_invalidate:
        cache.delete(key)
    msg = f"Invalidated request cache for\n\ttenant: {schema_name}\n\tcache_key_prefix: {cache_key_prefix}"
    LOG.info(msg)
Example #4
Source File: views.py From kobo-predict with BSD 2-Clause "Simplified" License | 5 votes |
def municipality_data(request):
    """Serve the aggregated per-municipality data set.

    The data is computed on the fly (a previous Redis-cached lookup was
    disabled in favor of direct generation).
    """
    return Response(generate_municipality_data().values())
Example #5
Source File: client.py From KortURL with MIT License | 5 votes |
def __init__(self):
    """Create a pooled Redis client for the URL-map cache database."""
    pool = redis.ConnectionPool(
        host=settings.REDIS_HOST,
        port=settings.REDIS_PORT,
        decode_responses=True,  # hand back str values instead of raw bytes
        db=settings.MAP_CACHE_DB,
        password=settings.REDIS_PASSWORD,
    )
    self._pool = pool
    self.client = redis.Redis(connection_pool=pool)
Example #6
Source File: redis_utils.py From zulip with Apache License 2.0 | 5 votes |
def get_redis_client() -> redis.StrictRedis:
    """Build a StrictRedis connection (db 0) from the Django settings."""
    return redis.StrictRedis(
        host=settings.REDIS_HOST,
        port=settings.REDIS_PORT,
        password=settings.REDIS_PASSWORD,
        db=0,
    )
Example #7
Source File: redis_store.py From freesound-datasets with GNU Affero General Public License v3.0 | 5 votes |
def __init__(self, verbose=False):
    """Open a StrictRedis connection (db 0) and record the verbosity flag."""
    self.verbose = verbose
    self.r = redis.StrictRedis(
        host=settings.REDIS_HOST,
        port=settings.REDIS_PORT,
        db=0,
    )
Example #8
Source File: dashboard.py From DevOps with GNU General Public License v2.0 | 5 votes |
def get(self, request, *args, **kwargs):
    """Return the raw 'COUNT' hash from Redis with its field names decoded.

    Field names come back from hgetall as bytes and are decoded to str;
    the values are passed through untouched.
    """
    connect = redis.StrictRedis(
        host=settings.REDIS_HOST,
        port=settings.REDIS_PORT,
        db=settings.REDIS_SPACE,
        password=settings.REDIS_PASSWD
    )
    raw = connect.hgetall('COUNT',)
    decoded = {str(field, encoding='utf-8'): value for field, value in raw.items()}
    return Response(
        decoded or {},
        status.HTTP_200_OK
    )
Example #9
Source File: channels.py From canvas with BSD 3-Clause "New" or "Revised" License | 5 votes |
def connect(self):
    """Open the Redis pub/sub dispatcher and the service connection.

    Generator-style (Twisted): yields the factory's deferred so the caller
    can wait until the TCP connection is established.
    """
    # Removed the original unused local `cc` lambda (a ClientCreator
    # factory that was never called) -- dead code.
    self.redis_sub = RedisDispatch(settings.REDIS_HOST, settings.REDIS_PORT)
    redis_factory = RedisServiceRegisteringFactory(self)
    reactor.connectTCP(settings.REDIS_HOST, settings.REDIS_PORT, redis_factory)
    yield redis_factory.deferred
Example #10
Source File: pwdmanager.py From freeipa-password-reset with GNU General Public License v3.0 | 5 votes |
def __init__(self):
    """Ensure a Kerberos ticket and an initialized IPA API, then connect to Redis.

    Side effects: may acquire a Kerberos ticket, bootstraps/finalizes the
    process-global `api` object, opens the IPA RPC client connection, and
    creates the Redis client used by this manager.
    """
    # Acquire a Kerberos ticket first -- the IPA RPC calls below require one.
    if self.__kerberos_has_ticket() is False:
        self.__kerberos_init()
    # Bootstrap the global IPA API exactly once per process.
    if api.isdone('finalize') is False:
        api.bootstrap_with_global_options(context='api')
        api.finalize()
    api.Backend.rpcclient.connect()
    # Connection parameters come from Django settings; presumably this store
    # holds the password-reset tokens -- confirm against the callers.
    self.redis = redis.StrictRedis(host=settings.REDIS_HOST, port=settings.REDIS_PORT, db=settings.REDIS_DB, password=settings.REDIS_PASSWORD)
Example #11
Source File: results_storm.py From cognitive with Apache License 2.0 | 4 votes |
def __init__(self, thread_id, name, experiment, component_id, max_results, cache_results):
    """Build and submit a workflow topology message to Storm via Redis.

    Python 2 code (print statements). Loads the experiment's workflow
    graph, topologically sorts it, serializes each component, attaches any
    CSV input table, then publishes the message on the 'workflow' channel
    and subscribes to the experiment's result channel.
    """
    threading.Thread.__init__(self)
    self.threadID = thread_id
    self.name = name
    self.experiment = experiment
    self.comp_id = component_id
    self.result = {}
    self.max_results = max_results
    self.cache_results = cache_results
    print "Submitting topology to storm. End component", self.comp_id
    exp = Experiment.objects.get(pk=self.experiment)
    # graph_data is a comma-separated list of "src:dst" edge pairs.
    graph = exp.workflow.graph_data
    graph_data = {}
    print graph
    tmp = graph.split(',')
    # Invert the edges into {node: set(dependencies)} for toposort.
    for elem in tmp:
        first_node = elem.split(":")[0]
        second_node = elem.split(":")[1]
        if second_node in graph_data:
            depend_nodes = graph_data[second_node]
            depend_nodes.add(first_node)
        else:
            graph_data[second_node] = set()
            graph_data[second_node].add(first_node)
    topological_graph = toposort_flatten(graph_data)
    print "Graph after topological sort", topological_graph
    message = {
        'exp_id': self.experiment,
        'result': self.comp_id,
        'graph': topological_graph,
        'components': defaultdict()}
    for data in topological_graph:
        component_id = int(data)
        comp = Component.objects.get(pk=component_id)
        # A Create/Table component supplies the input CSV: ship its columns
        # inline so the Storm side does not need filesystem access.
        if comp.operation_type.function_type == 'Create':
            if comp.operation_type.function_arg == 'Table':
                filename = comp.operation_type.function_subtype_arg
                input_data = read_csv(filename)
                message['input'] = {}
                for elem in list(input_data.columns):
                    message['input'][elem] = list(input_data[elem])
                message['cols'] = list(input_data.columns)
                # message['input'] = input_data.to_dict()
        serialized_obj = serializers.serialize('json', [comp.operation_type, ])
        print "Component_id", component_id, " ", comp.operation_type
        message['components'][data] = serialized_obj
    print "Message ", message
    r = redis.StrictRedis(host=settings.REDIS_HOST, port=settings.REDIS_PORT, db=0)
    # Subscribe before publishing so results for this experiment are not missed.
    self.pubsub = r.pubsub(ignore_subscribe_messages=True)
    self.pubsub.subscribe("Exp " + str(self.experiment))
    ret = r.publish('workflow', json.dumps(message))
    print "return", ret