Python apscheduler.schedulers.blocking.BlockingScheduler() Examples

The following are 12 code examples of apscheduler.schedulers.blocking.BlockingScheduler(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module apscheduler.schedulers.blocking, or try the search function.
Example #1
Source File: __init__.py    From cloud-inquisitor with Apache License 2.0 6 votes vote down vote up
def __init__(self):
    """Initialize the SQSScheduler: set up the worker pool, the APScheduler
    instance and connect to the job/status SQS queues."""
    super().__init__()

    # Single-worker pool backing the APScheduler jobs.
    self.pool = ProcessPoolExecutor(1)
    self.scheduler = APScheduler(
        threadpool=self.pool,
        job_defaults={'coalesce': True, 'misfire_grace_time': 30},
    )

    # Resolve the SQS queues from dbconfig-provided region and URLs.
    aws_session = get_local_aws_session()
    sqs = aws_session.resource('sqs', self.dbconfig.get('queue_region', self.ns))

    self.job_queue = sqs.Queue(self.dbconfig.get('job_queue_url', self.ns))
    self.status_queue = sqs.Queue(self.dbconfig.get('status_queue_url', self.ns))
Example #2
Source File: __init__.py    From cloud-inquisitor with Apache License 2.0 6 votes vote down vote up
def __init__(self):
    """Initialize the scheduler: plugin registries, process pool, APScheduler
    instance, and finally load all scheduler plugins."""
    super().__init__()
    # Registries filled in by load_plugins().
    self.collectors = {}
    self.auditors = []
    self.region_workers = []

    # Pool size is configurable; defaults to 20 workers.
    worker_count = self.dbconfig.get('worker_threads', self.ns, 20)
    self.pool = ProcessPoolExecutor(worker_count)
    self.scheduler = APScheduler(
        threadpool=self.pool,
        job_defaults={'coalesce': True, 'misfire_grace_time': 30},
    )

    self.load_plugins()
Example #3
Source File: scheduler.py    From GovLens with MIT License 6 votes vote down vote up
def scrape_scheduled_method(self):
    """Run one scraping batch from the job queue, then schedule the next run.

    Pops a batch of agencies off ``self.job_queue``, scrapes them on a fresh
    asyncio event loop, and arms a new BlockingScheduler to invoke this
    method again after ``self.interval_between_runs_seconds``. When the
    queue is empty, the chain stops and a completion message is printed.
    """
    self.job_execution_counter += 1
    print(
        f"Executing the {self.job_execution_counter} job. {self.queue_size - self.job_execution_counter} to be executed at {str(datetime.now().strftime('%Y-%m-%d %H:%M:%S'))}"
    )
    # Idiomatic truthiness test instead of comparing against `False` with `is`.
    if not self.job_queue.empty():
        agencies = self.job_queue.get()
        # Each run gets its own event loop; close it afterwards so loops
        # created by successive runs do not leak.
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        try:
            loop.run_until_complete(self.scraper_service.scrape_data(agencies))
        finally:
            loop.close()
        scheduler = BlockingScheduler()
        scheduler.add_job(
            self.scrape_scheduled_method,
            next_run_time=datetime.now()
            + timedelta(seconds=self.interval_between_runs_seconds),
        )
        scheduler.start()
    else:
        print(
            f"done with scraping at {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}"
        )
Example #4
Source File: workers.py    From archon with MIT License 6 votes vote down vote up
def __init__(self, broker, interval=10):
    """ Constructor
    :type interval: int
    :param interval: Check interval, in seconds
    """
    self.broker = broker
    self.interval = interval

    setup_logger(logger_name=__name__, log_file=__name__ + '.log')
    self.logger = logging.getLogger(__name__)

    # BUG FIX: honour the `interval` argument (previously hard-coded to 10s,
    # which silently ignored the constructor parameter). The default of 10
    # keeps existing callers' behavior unchanged.
    # NOTE: BlockingScheduler.start() blocks, so this constructor does not
    # return until the scheduler is shut down.
    scheduler = BlockingScheduler()
    scheduler.add_job(self.sync_job, 'interval', seconds=self.interval)
    scheduler.start()
Example #5
Source File: scheduler.py    From proxy_pool with MIT License 6 votes vote down vote up
def runScheduler():
    """Run an immediate proxy fetch, then keep periodic fetch/check jobs running."""
    runProxyFetch()

    log = LogHandler("scheduler")
    sched = BlockingScheduler(logger=log)

    # Fetch new proxies every 4 minutes, re-check the pool every 2 minutes.
    sched.add_job(runProxyFetch, 'interval', minutes=4, id="proxy_fetch", name="proxy采集")
    sched.add_job(runProxyCheck, 'interval', minutes=2, id="proxy_check", name="proxy检查")

    sched.configure(
        executors={
            'default': {'type': 'threadpool', 'max_workers': 20},
            'processpool': ProcessPoolExecutor(max_workers=5),
        },
        job_defaults={
            'coalesce': False,
            'max_instances': 10,
        },
    )

    sched.start()
Example #6
Source File: __init__.py    From cloud-inquisitor with Apache License 2.0 5 votes vote down vote up
def list_current_jobs(self):
    """Return a list of the currently scheduled jobs in APScheduler

    The two internal housekeeping jobs (``schedule_jobs`` and
    ``process_status_queue``) are excluded from the result.

    Returns:
        `dict` of `str`: :obj:`apscheduler/job:Job`
    """
    internal_jobs = ('schedule_jobs', 'process_status_queue')
    return {
        job.name: job
        for job in self.scheduler.get_jobs()
        if job.name not in internal_jobs
    }
Example #7
Source File: okcoin.py    From bitcoin-price-prediction with MIT License 5 votes vote down vote up
def main():
    """Run tick() at the interval of every ten seconds."""
    sched = BlockingScheduler(timezone=utc)
    sched.add_job(tick, 'interval', seconds=10)
    # start() blocks; exit quietly on Ctrl+C or interpreter shutdown.
    try:
        sched.start()
    except (KeyboardInterrupt, SystemExit):
        pass
Example #8
Source File: schdule.py    From Python-notes with MIT License 5 votes vote down vote up
def blocking_schedule():
    """Demo of BlockingScheduler: print the current time every 3 seconds
    until interrupted with Ctrl+C (Ctrl+Break on Windows)."""
    from apscheduler.schedulers.blocking import BlockingScheduler

    def tick():
        # Job body: report the current wall-clock time.
        print('Tick! The time is: %s' % datetime.now())

    sched = BlockingScheduler()
    sched.add_job(tick, 'interval', seconds=3)
    exit_key = 'Break' if os.name == 'nt' else 'C'
    print('Press Ctrl+{0} to exit'.format(exit_key))

    # start() blocks until the process is interrupted.
    try:
        sched.start()
    except (KeyboardInterrupt, SystemExit):
        pass
Example #9
Source File: websocket_watchdog.py    From huobi_Python with Apache License 2.0 5 votes vote down vote up
def __init__(self, is_auto_connect=True, heart_beat_limit_ms=CONNECT_HEART_BEAT_LIMIT_MS, reconnect_after_ms=RECONNECT_AFTER_TIME_MS):
    """Watchdog thread that runs watch_dog_job once per second to monitor
    websocket connection health and (optionally) auto-reconnect."""
    threading.Thread.__init__(self)
    self.is_auto_connect = is_auto_connect
    self.heart_beat_limit_ms = heart_beat_limit_ms
    # Never reconnect sooner than the heart-beat limit allows.
    self.reconnect_after_ms = max(reconnect_after_ms, heart_beat_limit_ms)
    self.logger = logging.getLogger("huobi-client")
    self.scheduler = BlockingScheduler()
    self.scheduler.add_job(watch_dog_job, "interval", max_instances=10, seconds=1, args=[self])
    # Start the thread immediately; its run() is expected to drive the scheduler.
    self.start()
Example #10
Source File: base_test.py    From ndscheduler with BSD 2-Clause "Simplified" License 5 votes vote down vote up
def setUp(self):
    """Attach the SQLite datastore under test to a throwaway scheduler."""
    scheduler = BlockingScheduler()
    self.store = DatastoreSqlite.get_instance()
    self.store.start(scheduler, None)
Example #11
Source File: ProxyScheduler.py    From spider with MIT License 5 votes vote down vote up
def runScheduler():
    """Run both proxy pipelines once, then keep them on periodic schedules."""
    rawProxyScheduler()
    usefulProxyScheduler()

    log = LogHandler("scheduler_log")
    sched = BlockingScheduler(logger=log)

    # Raw pool is refreshed every 5 minutes; useful pool re-validated every minute.
    sched.add_job(rawProxyScheduler, 'interval', minutes=5, id="raw_proxy_check", name="raw_proxy定时采集")
    sched.add_job(usefulProxyScheduler, 'interval', minutes=1, id="useful_proxy_check", name="useful_proxy定时检查")

    sched.start()
Example #12
Source File: scheduler.py    From destalinator with Apache License 2.0 5 votes vote down vote up
def schedule_job():
    """Build the cron schedule from config and run destalinate_job on it,
    blocking forever."""
    # When testing changes, set the "TEST_SCHEDULE" envvar to run more often
    if get_config().test_schedule:
        cron_args = {"hour": "*", "minute": "*/10"}
    else:
        cron_args = {"hour": get_config().schedule_hour}

    scheduler = BlockingScheduler()
    scheduler.add_job(destalinate_job, "cron", **cron_args)
    scheduler.start()