Python rq.Worker() Examples
The following are 23 code examples of rq.Worker(), collected from open-source projects. The most useful examples are listed first (vote counts appear in each example header), and the project, source file, and license are noted above each snippet.
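All of the examples below show only the worker side. For orientation, here is a minimal end-to-end sketch; the job function, queue name, and Redis URL are illustrative and not taken from any of the projects:

from redis import Redis
from rq import Queue, Worker

def say_hello(name):
    # Hypothetical job: any importable function can be enqueued.
    return 'hello, {}'.format(name)

redis_conn = Redis.from_url('redis://localhost:6379/0')
queue = Queue('default', connection=redis_conn)
queue.enqueue(say_hello, 'world')

# Burst mode drains the pending jobs and returns instead of blocking forever.
Worker([queue], connection=redis_conn).work(burst=True)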
Example #1
Source File: manage.py From flask-spark-docker with MIT License | 6 votes |
def run_worker():
    redis_url = app.config['REDIS_URL']
    redis_connection = redis.from_url(redis_url)
    with Connection(redis_connection):
        worker = Worker(app.config['QUEUES'])
        worker.work()
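The snippet reads two Flask config keys. A minimal sketch of what the project's config presumably provides (the concrete values here are illustrative assumptions, not taken from flask-spark-docker):

# Hypothetical Flask config values assumed by run_worker() above.
REDIS_URL = 'redis://localhost:6379/0'   # parsed by redis.from_url()
QUEUES = ['default']                     # queue names passed to Worker()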
Example #2
Source File: test_rq.py From sentry-python with BSD 2-Clause "Simplified" License | 6 votes |
def test_transport_shutdown(sentry_init, capture_events_forksafe):
    sentry_init(integrations=[RqIntegration()])

    events = capture_events_forksafe()

    queue = rq.Queue(connection=FakeStrictRedis())
    worker = rq.Worker([queue], connection=queue.connection)

    queue.enqueue(crashing_job, foo=42)
    worker.work(burst=True)

    event = events.read_event()
    events.read_flush()

    (exception,) = event["exception"]["values"]
    assert exception["type"] == "ZeroDivisionError"
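The test enqueues crashing_job, which is defined elsewhere in the test module. Judging from the final assertion it must raise ZeroDivisionError; a plausible reconstruction (an assumption, not the project's actual code) would be:

def crashing_job(foo=None):
    # Hypothetical job body: the test only requires that it raises
    # ZeroDivisionError, which worker.work(burst=True) then reports to Sentry.
    1 / 0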
Example #3
Source File: telemetry.py From gigantum-client with MIT License | 6 votes |
def _calc_rq_free() -> Dict[str, Any]:
    """Parses the output of `rq info` to return total number of workers and
    the count of workers currently idle."""
    conn = default_redis_conn()
    with rq.Connection(connection=conn):
        workers: List[rq.Worker] = [w for w in rq.Worker.all()]
        idle_workers = [w for w in workers if w.get_state() == 'idle']
        resp = {
            'workersTotal': len(workers),
            'workersIdle': len(idle_workers),
            'workersUnknown': len([w for w in workers if w.get_state() == '?'])
        }
        queues = 'default', 'build', 'publish'
        resp.update({f'queue{q.capitalize()}Size': len(rq.Queue(f'gigantum-{q}-queue', connection=conn))
                     for q in queues})
        return resp
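For reference, the dictionary this returns has a fixed shape determined by the keys built above; the counts below are purely illustrative:

# Illustrative return value of _calc_rq_free() (numbers are made up):
# {
#     'workersTotal': 4,
#     'workersIdle': 3,
#     'workersUnknown': 0,
#     'queueDefaultSize': 0,
#     'queueBuildSize': 1,
#     'queuePublishSize': 0,
# }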
Example #4
Source File: queue.py From freight with Apache License 2.0 | 6 votes |
def get_worker(self, listen=()):
    if not listen:
        listen = self.config["queues"]

    def send_to_sentry(job, *exc_info):
        self.sentry.captureException(
            exc_info=exc_info,
            extra={
                "job_id": job.id,
                "func": job.__name__,
                "args": job.args,
                "kwargs": job.kwargs,
                "description": job.description,
            },
        )

    exception_handlers = [send_to_sentry]

    return Worker(
        [QueueType(k, connection=self.connection) for k in listen],
        exception_handlers=exception_handlers,
        connection=self.connection,
    )
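Worker's exception_handlers parameter takes callables invoked as handler(job, exc_type, exc_value, traceback) when a job fails, which is why send_to_sentry above can collect the last three with *exc_info. A minimal standalone sketch (the handler name and the print are illustrative):

def log_failure(job, exc_type, exc_value, traceback):
    # Hypothetical handler: record the failure somewhere.
    print('job {} failed: {}'.format(job.id, exc_value))
    # Per rq's documented convention, returning True falls through to the
    # next handler in the chain, while returning False stops further handling.
    return True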
Example #5
Source File: runworker.py From xcessiv with Apache License 2.0 | 5 votes |
def runworker(app):
    REDIS_HOST = app.config['REDIS_HOST']
    REDIS_PORT = app.config['REDIS_PORT']
    REDIS_DB = app.config['REDIS_DB']
    QUEUES = app.config['QUEUES']
    redis_conn = Connection(Redis(REDIS_HOST, REDIS_PORT, REDIS_DB))
    with redis_conn:
        w = Worker(QUEUES)
        w.work()
Example #6
Source File: manage.py From openci with MIT License | 5 votes |
def run_worker():
    redis_url = app.config['REDIS_URL']
    redis_connection = redis.from_url(redis_url)
    with Connection(redis_connection):
        worker = Worker(app.config['QUEUES'])
        worker.work()
Example #7
Source File: worker.py From OSMDeepOD with MIT License | 5 votes |
def run(self, redis):
    redis_connection = Redis(redis[0], redis[1], password=redis[2])
    with Connection(redis_connection):
        # Materialize the queues as a list: a Python 3 map object is always
        # truthy (so the [Queue()] fallback would never trigger) and has no len().
        qs = [Queue(name) for name in self.queues] or [Queue()]
        w = rq.Worker(qs)
        w.work()
        # len() on an rq Queue returns the number of jobs it currently holds.
        print("Items in queue '{0}': {1}".format(self.queues[0], len(qs[0])))
Example #8
Source File: rq_worker.py From impactstory-tng with MIT License | 5 votes |
def start_worker(queue_name):
    print "starting worker '{}'...".format(queue_name)
    with Connection(redis_rq_conn):
        worker = Worker(Queue(queue_name), exc_handler=failed_job_handler)
        worker.work()
Example #9
Source File: manage.py From flask-base with MIT License | 5 votes |
def run_worker(): """Initializes a slim rq task queue.""" listen = ['default'] conn = Redis( host=app.config['RQ_DEFAULT_HOST'], port=app.config['RQ_DEFAULT_PORT'], db=0, password=app.config['RQ_DEFAULT_PASSWORD']) with Connection(conn): worker = Worker(map(Queue, listen)) worker.work()
Example #10
Source File: manage.py From penn-club-ratings with MIT License | 5 votes |
def run_worker(): """Initializes a slim rq task queue.""" listen = ['default'] conn = Redis( host=app.config['RQ_DEFAULT_HOST'], port=app.config['RQ_DEFAULT_PORT'], db=0, password=app.config['RQ_DEFAULT_PASSWORD']) with Connection(conn): worker = Worker(map(Queue, listen)) worker.work()
Example #11
Source File: manage.py From BhagavadGita with GNU General Public License v3.0 | 5 votes |
def run_worker(): """Initializes a slim rq task queue.""" listen = ['default'] conn = Redis( host=app.config['RQ_DEFAULT_HOST'], port=app.config['RQ_DEFAULT_PORT'], db=0, password=app.config['RQ_DEFAULT_PASSWORD']) with Connection(conn): worker = Worker(map(Queue, listen)) worker.work()
Example #12
Source File: rq_worker.py From depsy with MIT License | 5 votes |
def start_worker(queue_name):
    print "starting worker '{}'...".format(queue_name)
    with Connection(redis_rq_conn):
        worker = Worker(Queue(queue_name), exc_handler=failed_job_handler)
        worker.work()
Example #13
Source File: custom_worker.py From django-test-rq with The Unlicense | 5 votes |
def handle(self, *args, **options):
    redis_conn = django_rq.get_connection('default')
    q = Queue(settings.DJANGO_TEST_RQ_LOW_QUEUE, connection=redis_conn)
    worker = Worker([q], exc_handler=my_handler, connection=redis_conn)
    worker.work()
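django_rq.get_connection('default') builds the Redis connection from Django settings. A minimal sketch of the settings this presumably relies on (values are illustrative; DJANGO_TEST_RQ_LOW_QUEUE is the project's own setting for the queue name):

# Hypothetical excerpt from settings.py assumed by the command above.
RQ_QUEUES = {
    'default': {
        'HOST': 'localhost',
        'PORT': 6379,
        'DB': 0,
    },
}
DJANGO_TEST_RQ_LOW_QUEUE = 'django-test-rq-low'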
Example #14
Source File: worker.py From renku-python with Apache License 2.0 | 5 votes |
def worker(queue_list):
    """Creates worker object."""
    def build_worker():
        """Build worker."""
        log_level = os.getenv('RQ_WORKER_LOG_LEVEL', 'WARNING')
        setup_loghandlers(log_level)
        log.info('worker log level set to {}'.format(log_level))

        rq_worker = Worker(queue_list, connection=WorkerQueues.connection)
        log.info('worker created')

        return rq_worker

    yield build_worker()
Example #15
Source File: multi_rqworker.py From swift_rpc with MIT License | 5 votes |
def worker():
    logging.info('this is worker')
    with Connection(conn):
        worker = Worker(map(Queue, listen))
        worker.work()
Example #16
Source File: command_line.py From Dallinger with MIT License | 5 votes |
def rq_worker():
    """Start an rq worker in the context of dallinger."""
    setup_experiment(log)
    with Connection(redis_conn):
        # right now we care about low queue for bots
        worker = Worker("low")
        worker.work()
Example #17
Source File: command_line.py From Dallinger with MIT License | 5 votes |
def compensate(recruiter, worker_id, email, dollars, sandbox):
    """Credit a specific worker by ID through their recruiter"""
    out = Output()
    config = get_config()
    config.load()
    mode = "sandbox" if sandbox else "live"
    do_notify = email is not None
    no_email_str = "" if email else " NOT"

    with config.override({"mode": mode}):
        rec = by_name(recruiter)
        if not click.confirm(
            '\n\nYou are about to pay worker "{}" ${:.2f} in "{}" mode using the "{}" recruiter.\n'
            "The worker will{} be notified by email. "
            "Continue?".format(worker_id, dollars, mode, recruiter, no_email_str)
        ):
            out.log("Aborting...")
            return

        try:
            result = rec.compensate_worker(
                worker_id=worker_id, email=email, dollars=dollars, notify=do_notify
            )
        except Exception as ex:
            out.error(
                "Compensation failed. The recruiter reports the following error:\n{}".format(
                    ex
                ),
                delay=0,
            )
            return

        out.log("HIT Details", delay=0)
        out.log(tabulate.tabulate(result["hit"].items()), chevrons=False, delay=0)
        out.log("Qualification Details", delay=0)
        out.log(tabulate.tabulate(result["qualification"].items()), chevrons=False, delay=0)
        out.log("Worker Notification", delay=0)
        out.log(tabulate.tabulate(result["email"].items()), chevrons=False, delay=0)
Example #18
Source File: worker.py From fairtest with Apache License 2.0 | 5 votes |
def start(redis_connection, queues):
    with Connection(redis_connection):
        w = Worker(queues)
        w.work()
Example #19
Source File: tracing.py From sentry-python with BSD 2-Clause "Simplified" License | 5 votes |
def run_worker():
    print("WORKING")
    worker = rq.Worker([queue], connection=queue.connection)
    worker.work()
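run_worker here closes over a queue object that is not shown in the excerpt. It is presumably constructed the same way as in Example #2; a sketch of that assumption (not code from the project):

# Hypothetical setup for the excerpt above, mirroring Example #2.
queue = rq.Queue(connection=FakeStrictRedis())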
Example #20
Source File: manage.py From flask-rq-example with BSD 2-Clause "Simplified" License | 5 votes |
def runworker():
    redis_url = app.config['REDIS_URL']
    redis_connection = redis.from_url(redis_url)
    with Connection(redis_connection):
        worker = Worker(app.config['QUEUES'])
        worker.work()
Example #21
Source File: manage.py From flask-redis-queue with MIT License | 5 votes |
def run_worker():
    redis_url = app.config["REDIS_URL"]
    redis_connection = redis.from_url(redis_url)
    with Connection(redis_connection):
        worker = Worker(app.config["QUEUES"])
        worker.work()
Example #22
Source File: worker.py From rq-win with MIT License | 4 votes |
def perform_job(self, job, queue, heartbeat_ttl=None):
    """Performs the actual work of a job.  Will/should only be called
    inside the work horse's process.
    """
    self.prepare_job_execution(job, heartbeat_ttl)
    self.procline('Processing %s from %s since %s' % (
        job.func_name,
        job.origin, time.time()))

    try:
        job.started_at = times.now()

        # I have DISABLED the time limit!
        rv = job.perform()

        # Pickle the result in the same try-except block since we need to
        # use the same exc handling when pickling fails
        job._result = rv
        job._status = rq.job.JobStatus.FINISHED
        job.ended_at = times.now()

        #
        # Using the code from Worker.handle_job_success
        #
        with self.connection.pipeline() as pipeline:
            pipeline.watch(job.dependents_key)
            queue.enqueue_dependents(job, pipeline=pipeline)
            self.set_current_job_id(None, pipeline=pipeline)
            self.increment_successful_job_count(pipeline=pipeline)
            result_ttl = job.get_result_ttl(self.default_result_ttl)
            if result_ttl != 0:
                job.save(pipeline=pipeline, include_meta=False)
            job.cleanup(result_ttl, pipeline=pipeline, remove_from_queue=False)
            pipeline.execute()
    except:
        # Use the public setter here, to immediately update Redis
        job.status = rq.job.JobStatus.FAILED
        self.handle_exception(job, *sys.exc_info())
        return False

    if rv is None:
        self.log.info('Job OK')
    else:
        self.log.info('Job OK, result = %s' % (rq.worker.yellow(rq.compat.text_type(rv)),))

    if result_ttl == 0:
        self.log.info('Result discarded immediately.')
    elif result_ttl > 0:
        self.log.info('Result is kept for %d seconds.' % result_ttl)
    else:
        self.log.warning('Result will never expire, clean up result key manually.')

    return True
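To actually use an overridden worker like this, the subclass is either instantiated directly or handed to the rq CLI via --worker-class. A minimal sketch, assuming the subclass is importable as myproject.worker.WindowsWorker (the import path is illustrative; check rq-win's documentation for the real one):

# Option 1: instantiate the custom worker class directly.
from redis import Redis
from myproject.worker import WindowsWorker  # hypothetical import path

worker = WindowsWorker(['default'], connection=Redis())
worker.work()

# Option 2: point the rq CLI at the custom class:
#   rq worker --worker-class myproject.worker.WindowsWorker default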
Example #23
Source File: worker.py From bugbug with Mozilla Public License 2.0 | 4 votes |
def main():
    # Bootstrap the worker assets
    bugbug_http.boot.boot_worker()

    # Provide queue names to listen to as arguments to this script,
    # similar to rq worker
    redis_url = os.environ.get("REDIS_URL", "redis://localhost/0")
    redis_conn = Redis.from_url(redis_url)
    with Connection(connection=redis_conn):
        qs = sys.argv[1:] or ["default"]
        w = Worker(qs)
        w.work()