Python rq.Connection() Examples
The following are 28 code examples of rq.Connection(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module rq, or try the search function.
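Before the individual examples, here is a minimal, self-contained sketch of the idiom they all share. This sketch is not taken from any of the listed projects; it assumes a Redis server running on localhost:6379, and say_hello is a hypothetical task function invented for illustration:

from redis import Redis
from rq import Connection, Queue, SimpleWorker

def say_hello(name):
    # Hypothetical task. In the projects below, tasks live in importable
    # modules so that separate worker processes can load them.
    return "Hello, %s!" % name

# Connection() pushes the given Redis connection onto RQ's connection
# stack for the duration of the block, so Queue() and SimpleWorker()
# below need no explicit connection argument.
with Connection(Redis()):
    q = Queue()
    job = q.enqueue(say_hello, "rq")
    SimpleWorker([q]).work(burst=True)  # run the queued jobs in-process, then exit
    print(job.result)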
Example #1
Source File: views.py From flask-redis-queue with MIT License | 7 votes

def get_status(task_id):
    with Connection(redis.from_url(current_app.config["REDIS_URL"])):
        q = Queue()
        task = q.fetch_job(task_id)
    if task:
        response_object = {
            "status": "success",
            "data": {
                "task_id": task.get_id(),
                "task_status": task.get_status(),
                "task_result": task.result,
            },
        }
    else:
        response_object = {"status": "error"}
    return jsonify(response_object)
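Note that Queue() is constructed without an explicit connection: inside the with Connection(...) block, RQ resolves the connection from the stack that Connection() pushed. Most of the examples below rely on the same mechanism.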
Example #2
Source File: manage.py From flask-spark-docker with MIT License | 6 votes

def run_worker():
    redis_url = app.config['REDIS_URL']
    redis_connection = redis.from_url(redis_url)
    with Connection(redis_connection):
        worker = Worker(app.config['QUEUES'])
        worker.work()
Example #3
Source File: views.py From openci with MIT License | 6 votes

def grade_project(project_id):
    project = Project.query.filter_by(id=project_id).first_or_404()
    with Connection(redis.from_url(current_app.config['REDIS_URL'])):
        q = Queue()
        task = q.enqueue(
            create_task,
            project.url,
            current_app.config["OPENFAAS_URL"]
        )
    response_object = {
        'status': 'success',
        'data': {
            'task_id': task.get_id()
        }
    }
    return jsonify(response_object), 202
Example #4
Source File: async_http.py From BTG with GNU General Public License v3.0 | 6 votes

def request_poller(queue_1, queue_2, nb_to_do):
    requests = pollout_requests(queue_2, nb_to_do)
    try:
        with Connection(Redis()) as conn:
            q = Queue(queue_1, connection=conn)
    except:
        mod.display("ASYNC_HTTP",
                    "ERROR",
                    "Could not establish connection with Redis, check if you have redis_host, \
                    redis_port and maybe redis_password in /config/config.ini")
    loop = asyncio.get_event_loop()
    future = asyncio.ensure_future(run(requests))
    x = loop.run_until_complete(future)
    loop.close()
    for y in x:
        if y is not None:
            try:
                q.enqueue(module_worker_response, args=(y), result_ttl=0)
            except:
                mod.display("ASYNC_HTTP",
                            "ERROR",
                            "Could not enqueue job to Redis in func request_poller")
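Two caveats in this snippet, kept verbatim from the source: (y) is not a one-element tuple (that would be (y,)), so args receives y itself rather than an argument tuple; and if the first try block fails, q is never bound, so the later q.enqueue call raises a NameError that the bare except silently swallows.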
Example #5
Source File: telemetry.py From gigantum-client with MIT License | 6 votes

def _calc_rq_free() -> Dict[str, Any]:
    """Parses the output of `rq info` to return total number of workers and
    the count of workers currently idle."""
    conn = default_redis_conn()
    with rq.Connection(connection=conn):
        workers: List[rq.Worker] = [w for w in rq.Worker.all()]
        idle_workers = [w for w in workers if w.get_state() == 'idle']
        resp = {
            'workersTotal': len(workers),
            'workersIdle': len(idle_workers),
            'workersUnknown': len([w for w in workers if w.get_state() == '?'])
        }
        queues = 'default', 'build', 'publish'
        resp.update({f'queue{q.capitalize()}Size': len(rq.Queue(f'gigantum-{q}-queue', connection=conn))
                     for q in queues})
    return resp
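The len(rq.Queue(...)) calls work because RQ's Queue implements __len__ as the number of jobs currently in the queue.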
Example #6
Source File: worker.py From gigantum-client with MIT License | 6 votes

def start_rq_worker(queue_name: str, burst: bool = False) -> None:
    """Start an RQ worker for the given queue."""
    try:
        with Connection(connection=redis.Redis(db=13)):
            q = Queue(name=queue_name)
            logger.info(f"Starting {'bursted ' if burst else ''}"
                        f"RQ worker for in {queue_name}")
            if burst:
                Worker(q).work(burst=True)
            else:
                # This is to bypass a problem when the user closes their laptop
                # (All the workers time out and die). This should prevent that up until a week.
                wk_in_secs = 60 * 60 * 24 * 7
                Worker(q, default_result_ttl=wk_in_secs,
                       default_worker_ttl=wk_in_secs).work()
    except Exception as e:
        logger.exception("Worker in pid {} failed with exception {}".format(os.getpid(), e))
        raise
Example #7
Source File: manage.py From openci with MIT License | 5 votes

def run_worker():
    redis_url = app.config['REDIS_URL']
    redis_connection = redis.from_url(redis_url)
    with Connection(redis_connection):
        worker = Worker(app.config['QUEUES'])
        worker.work()
Example #8
Source File: views.py From openci with MIT License | 5 votes

def get_status(project_id, task_id):
    with Connection(redis.from_url(current_app.config['REDIS_URL'])):
        q = Queue()
        task = q.fetch_job(task_id)
    if task:
        response_object = {
            'status': 'success',
            'data': {
                'task_id': task.get_id(),
                'task_status': task.get_status(),
                'task_result': task.result
            }
        }
        if task.get_status() == 'finished':
            project = Project.query.filter_by(id=project_id).first()
            project.status = False
            if bool(task.result['status']):
                project.status = True
            db.session.commit()
            db.session.add(
                Build(
                    project_id=project.id,
                    status=project.status,
                    datetime=datetime.today().strftime('%d-%m-%Y %H:%M:%S')
                )
            )
            db.session.commit()
    else:
        response_object = {'status': 'error'}
    return jsonify(response_object)
Example #9
Source File: poller.py From BTG with GNU General Public License v3.0 | 5 votes

def time_based_poller(working_queue, request_queue):
    starttime = time.time()
    queue_1 = working_queue
    queue_2 = request_queue

    while True:
        redis_host, redis_port, redis_password = init_redis()
        try:
            r = redis.StrictRedis(host=redis_host, port=redis_port,
                                  password=redis_password)
        except:
            mod.display("POLLER", message_type="ERROR",
                        string="Cannot establish connection with Redis in func time_based_poller")
        try:
            len = r.llen(queue_2)
        except:
            mod.display("POLLER", message_type="ERROR",
                        string="Cannot ask queue: %s length to Redis" % (queue_2))
        if len <= 0:
            time.sleep(1.0 - ((time.time() - starttime) % 1.0))
            continue
        try:
            with Connection(Redis()) as conn:
                q = Queue(queue_1, connection=conn)
                q.enqueue(async_http.request_poller, args=(queue_1, queue_2, len),
                          result_ttl=0)
        except:
            mod.display("POLLER", message_type="ERROR",
                        string="Could not establish connection with Redis, check if you have redis_host, \
                        redis_port and maybe redis_password in /config/config.ini")
        time.sleep(1.0 - ((time.time() - starttime) % 1.0))
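Note that len = r.llen(queue_2) shadows the built-in len for the rest of the loop body; the code works only because the builtin is not needed afterwards.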
Example #10
Source File: worker.py From OSMDeepOD with MIT License | 5 votes

def run(self, redis):
    redis_connection = Redis(redis[0], redis[1], password=redis[2])
    with Connection(redis_connection):
        qs = map(Queue, self.queues) or [Queue()]
        w = rq.Worker(qs)
        w.work()
        print('Items in queue \'{0}\': {1}'.format(self.queues[0], len(qs)))
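Under Python 3 this snippet breaks in two ways: map() returns an iterator, which is always truthy (so the or [Queue()] fallback can never fire) and which has no len(). Under Python 2, where map() returns a list, it behaves as intended.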
Example #11
Source File: rq_worker.py From impactstory-tng with MIT License | 5 votes

def start_worker(queue_name):
    print "starting worker '{}'...".format(queue_name)
    with Connection(redis_rq_conn):
        worker = Worker(Queue(queue_name), exc_handler=failed_job_handler)
        worker.work()
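This is Python 2 code (the print statement would be a syntax error under Python 3); the same applies to the identical Example #15.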
Example #12
Source File: manage.py From flask-base with MIT License | 5 votes

def run_worker():
    """Initializes a slim rq task queue."""
    listen = ['default']
    conn = Redis(
        host=app.config['RQ_DEFAULT_HOST'],
        port=app.config['RQ_DEFAULT_PORT'],
        db=0,
        password=app.config['RQ_DEFAULT_PASSWORD'])

    with Connection(conn):
        worker = Worker(map(Queue, listen))
        worker.work()
Example #13
Source File: manage.py From penn-club-ratings with MIT License | 5 votes

def run_worker():
    """Initializes a slim rq task queue."""
    listen = ['default']
    conn = Redis(
        host=app.config['RQ_DEFAULT_HOST'],
        port=app.config['RQ_DEFAULT_PORT'],
        db=0,
        password=app.config['RQ_DEFAULT_PASSWORD'])

    with Connection(conn):
        worker = Worker(map(Queue, listen))
        worker.work()
Example #14
Source File: manage.py From BhagavadGita with GNU General Public License v3.0 | 5 votes

def run_worker():
    """Initializes a slim rq task queue."""
    listen = ['default']
    conn = Redis(
        host=app.config['RQ_DEFAULT_HOST'],
        port=app.config['RQ_DEFAULT_PORT'],
        db=0,
        password=app.config['RQ_DEFAULT_PASSWORD'])

    with Connection(conn):
        worker = Worker(map(Queue, listen))
        worker.work()
Example #15
Source File: rq_worker.py From depsy with MIT License | 5 votes

def start_worker(queue_name):
    print "starting worker '{}'...".format(queue_name)
    with Connection(redis_rq_conn):
        worker = Worker(Queue(queue_name), exc_handler=failed_job_handler)
        worker.work()
Example #16
Source File: multi_rqworker.py From swift_rpc with MIT License | 5 votes

def worker():
    logging.info('this is worker')
    with Connection(conn):
        worker = Worker(map(Queue, listen))
        worker.work()
Example #17
Source File: worker.py From Dallinger with MIT License | 5 votes

def main():
    import gevent.monkey

    gevent.monkey.patch_all()

    from gevent.queue import LifoQueue

    # These imports are inside the __main__ block
    # to make sure that we only import from rq_gevent_worker
    # (which has the side effect of applying gevent monkey patches)
    # in the worker process. This way other processes can import the
    # redis connection without that side effect.
    import os

    from redis import BlockingConnectionPool, StrictRedis
    from rq import Queue, Connection
    from dallinger.heroku.rq_gevent_worker import GeventWorker as Worker
    from dallinger.config import initialize_experiment_package

    initialize_experiment_package(os.getcwd())

    import logging

    logging.basicConfig(format="%(asctime)s %(message)s", level=logging.DEBUG)

    redis_url = os.getenv("REDIS_URL", "redis://localhost:6379")
    # Specify queue class for improved performance with gevent.
    # see http://carsonip.me/posts/10x-faster-python-gevent-redis-connection-pool/
    redis_pool = BlockingConnectionPool.from_url(redis_url, queue_class=LifoQueue)
    redis_conn = StrictRedis(connection_pool=redis_pool)

    with Connection(redis_conn):
        worker = Worker(list(map(Queue, listen)))
        worker.work()
Example #18
Source File: command_line.py From Dallinger with MIT License | 5 votes

def rq_worker():
    """Start an rq worker in the context of dallinger."""
    setup_experiment(log)
    with Connection(redis_conn):
        # right now we care about low queue for bots
        worker = Worker("low")
        worker.work()
Example #19
Source File: worker.py From fairtest with Apache License 2.0 | 5 votes

def start(redis_connection, queues):
    with Connection(redis_connection):
        w = Worker(queues)
        w.work()
Example #20
Source File: manage.py From flask-rq-example with BSD 2-Clause "Simplified" License | 5 votes

def runworker():
    redis_url = app.config['REDIS_URL']
    redis_connection = redis.from_url(redis_url)
    with Connection(redis_connection):
        worker = Worker(app.config['QUEUES'])
        worker.work()
Example #21
Source File: manage.py From flask-redis-queue with MIT License | 5 votes

def run_worker():
    redis_url = app.config["REDIS_URL"]
    redis_connection = redis.from_url(redis_url)
    with Connection(redis_connection):
        worker = Worker(app.config["QUEUES"])
        worker.work()
Example #22
Source File: views.py From flask-redis-queue with MIT License | 5 votes

def run_task():
    task_type = request.form["type"]
    with Connection(redis.from_url(current_app.config["REDIS_URL"])):
        q = Queue()
        task = q.enqueue(create_task, task_type)
    response_object = {
        "status": "success",
        "data": {
            "task_id": task.get_id()
        }
    }
    return jsonify(response_object), 202
Example #23
Source File: api.py From asu with GNU General Public License v2.0 | 5 votes

def get_queue() -> Queue:
    """Return the current queue

    Returns:
        Queue: The current RQ work queue
    """
    if "queue" not in g:
        with Connection():
            g.queue = Queue(connection=get_redis())
    return g.queue
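Called with no arguments, Connection() pushes a default Redis connection to localhost:6379. Since the Queue here is given an explicit connection=get_redis(), which takes precedence over the stack, the context manager is effectively a no-op in this function.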
Example #24
Source File: runworker.py From xcessiv with Apache License 2.0 | 5 votes

def runworker(app):
    REDIS_HOST = app.config['REDIS_HOST']
    REDIS_PORT = app.config['REDIS_PORT']
    REDIS_DB = app.config['REDIS_DB']
    QUEUES = app.config['QUEUES']

    redis_conn = Connection(Redis(REDIS_HOST, REDIS_PORT, REDIS_DB))
    with redis_conn:
        w = Worker(QUEUES)
        w.work()
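The name redis_conn is slightly misleading: Connection(...) returns a context manager for RQ's connection stack, not a Redis connection object, so this is the same with Connection(Redis(...)) pattern used elsewhere, just spelled in two steps.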
Example #25
Source File: worker.py From bugbug with Mozilla Public License 2.0 | 4 votes

def main():
    # Bootstrap the worker assets
    bugbug_http.boot.boot_worker()

    # Provide queue names to listen to as arguments to this script,
    # similar to rq worker
    redis_url = os.environ.get("REDIS_URL", "redis://localhost/0")
    redis_conn = Redis.from_url(redis_url)
    with Connection(connection=redis_conn):
        qs = sys.argv[1:] or ["default"]

        w = Worker(qs)
        w.work()
Example #26
Source File: views.py From xcessiv with Apache License 2.0 | 4 votes

def create_new_stacked_ensemble():
    path = functions.get_path_from_query_string(request)
    req_body = request.get_json()

    with functions.DBContextManager(path) as session:
        if request.method == 'GET':
            return jsonify(
                list(map(lambda x: x.serialize,
                         session.query(models.StackedEnsemble).all()))
            )
        if request.method == 'POST':
            base_learners = session.query(models.BaseLearner).\
                filter(models.BaseLearner.id.in_(req_body['base_learner_ids'])).all()
            if len(base_learners) != len(req_body['base_learner_ids']):
                raise exceptions.UserError('Not all base learners found')
            for learner in base_learners:
                if learner.job_status != 'finished':
                    raise exceptions.UserError('Not all base learners have finished')
            base_learner_origin = session.query(models.BaseLearnerOrigin).\
                filter_by(id=req_body['base_learner_origin_id']).first()
            if base_learner_origin is None:
                raise exceptions.UserError('Base learner origin {} not '
                                           'found'.format(req_body['base_learner_origin_id']), 404)

            # Retrieve full hyperparameters
            est = base_learner_origin.return_estimator()
            params = functions.import_object_from_string_code(
                req_body['secondary_learner_hyperparameters_source'], 'params')
            est.set_params(**params)
            hyperparameters = functions.make_serializable(est.get_params())

            stacked_ensembles = session.query(models.StackedEnsemble).\
                filter_by(base_learner_origin_id=req_body['base_learner_origin_id'],
                          secondary_learner_hyperparameters=hyperparameters,
                          base_learner_ids=sorted([bl.id for bl in base_learners])).all()
            if stacked_ensembles:
                raise exceptions.UserError('Stacked ensemble exists')

            stacked_ensemble = models.StackedEnsemble(
                secondary_learner_hyperparameters=hyperparameters,
                base_learners=base_learners,
                base_learner_origin=base_learner_origin,
                job_status='queued'
            )

            session.add(stacked_ensemble)
            session.commit()

            with Connection(get_redis_connection()):
                rqtasks.evaluate_stacked_ensemble.delay(path, stacked_ensemble.id)

            return jsonify(stacked_ensemble.serialize)
Example #27
Source File: views.py From xcessiv with Apache License 2.0 | 4 votes

def get_automated_runs():
    """Return all automated runs"""
    path = functions.get_path_from_query_string(request)

    if request.method == 'GET':
        with functions.DBContextManager(path) as session:
            automated_runs = session.query(models.AutomatedRun).all()
            return jsonify(list(map(lambda x: x.serialize, automated_runs)))

    if request.method == 'POST':
        req_body = request.get_json()
        with functions.DBContextManager(path) as session:
            base_learner_origin = None
            if req_body['category'] == 'bayes' or req_body['category'] == 'greedy_ensemble_search':
                base_learner_origin = session.query(models.BaseLearnerOrigin).\
                    filter_by(id=req_body['base_learner_origin_id']).first()
                if base_learner_origin is None:
                    raise exceptions.UserError('Base learner origin {} not found'.format(
                        req_body['base_learner_origin_id']
                    ), 404)
                if not base_learner_origin.final:
                    raise exceptions.UserError('Base learner origin {} is not final'.format(
                        req_body['base_learner_origin_id']
                    ))
            elif req_body['category'] == 'tpot':
                pass
            else:
                raise exceptions.UserError('Automated run category'
                                           ' {} not recognized'.format(req_body['category']))

            # Check for any syntax errors
            module = functions.import_string_code_as_module(req_body['source'])
            del module

            automated_run = models.AutomatedRun(req_body['source'],
                                                'queued',
                                                req_body['category'],
                                                base_learner_origin)
            session.add(automated_run)
            session.commit()

            with Connection(get_redis_connection()):
                rqtasks.start_automated_run.delay(path, automated_run.id)

            return jsonify(automated_run.serialize)
Example #28
Source File: views.py From xcessiv with Apache License 2.0 | 4 votes

def create_base_learner(id):
    """This creates a single base learner from a base learner origin and queues it up"""
    path = functions.get_path_from_query_string(request)

    with functions.DBContextManager(path) as session:
        base_learner_origin = session.query(models.BaseLearnerOrigin).filter_by(id=id).first()
        if base_learner_origin is None:
            raise exceptions.UserError('Base learner origin {} not found'.format(id), 404)

        if not base_learner_origin.final:
            raise exceptions.UserError('Base learner origin {} is not final'.format(id))

        req_body = request.get_json()

        # Retrieve full hyperparameters
        est = base_learner_origin.return_estimator()
        hyperparameters = functions.import_object_from_string_code(req_body['source'], 'params')
        est.set_params(**hyperparameters)
        hyperparameters = functions.make_serializable(est.get_params())

        base_learners = session.query(models.BaseLearner).\
            filter_by(base_learner_origin_id=id,
                      hyperparameters=hyperparameters).all()
        if base_learners:
            raise exceptions.UserError('Base learner exists with given hyperparameters')

        base_learner = models.BaseLearner(hyperparameters,
                                          'queued',
                                          base_learner_origin)

        if 'single_searches' not in base_learner_origin.description:
            base_learner_origin.description['single_searches'] = []
        base_learner_origin.description['single_searches'] += ([req_body['source']])

        session.add(base_learner)
        session.add(base_learner_origin)
        session.commit()

        with Connection(get_redis_connection()):
            rqtasks.generate_meta_features.delay(path, base_learner.id)

        return jsonify(base_learner.serialize)