Python apscheduler.schedulers.background.BackgroundScheduler() Examples

The following are 30 code examples of apscheduler.schedulers.background.BackgroundScheduler(). You can go to the original project or source file by following the links above each example. You may also want to check out all available functions and classes of the module apscheduler.schedulers.background, or try the search function.
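Before the project excerpts, here is a minimal sketch of the pattern almost all of them share: create the scheduler, register jobs, start it, and shut it down on exit. The report job and the cron schedule below are illustrative placeholders, not taken from any of the listed projects.

from datetime import datetime

from apscheduler.schedulers.background import BackgroundScheduler

def report():
    print('Report generated at %s' % datetime.now())

scheduler = BackgroundScheduler()                     # jobs run in a background thread
scheduler.add_job(report, 'cron', hour=8, minute=30)  # every day at 08:30
scheduler.start()                                     # returns immediately; the process must be kept alive elsewhere
# ... application code ...
scheduler.shutdown()                                  # stop the scheduler on exit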
Example #1
Source File: schdule.py    From Python-notes with MIT License
def background_schedule():
    import os
    import time
    from datetime import datetime

    from apscheduler.schedulers.background import BackgroundScheduler

    def tick():
        print('Tick! The time is: %s' % datetime.now())

    scheduler = BackgroundScheduler()
    scheduler.add_job(tick, 'interval', seconds=3)
    scheduler.start()
    print('Press Ctrl+{0} to exit'.format('Break' if os.name == 'nt' else 'C'))

    try:
        # This is here to simulate application activity (which keeps the main thread alive).
        while True:
            time.sleep(2)
    except (KeyboardInterrupt, SystemExit):
        # Not strictly necessary if daemonic mode is enabled but should be done if possible
        scheduler.shutdown() 
Example #2
Source File: scheduler_helper.py    From resilient-community-apps with MIT License
def __init__(self, datastore_dir, threat_max, timezone):
        global _scheduler_

        self.timezone = timezone

        lock = threading.Lock()
        with lock:
            if not _scheduler_:
                jobstores = {
                    'default': SQLAlchemyJobStore(url='sqlite:///{}/scheduler.sqlite'.format(datastore_dir)),
                }
                executors = {
                    'default': ThreadPoolExecutor(threat_max),
                }
                job_defaults = {
                    'coalesce': False,
                    'max_instances': 1
                }
                _scheduler_ = BackgroundScheduler(
                    jobstores=jobstores, executors=executors,
                    job_defaults=job_defaults, timezone=timezone
                )
                _scheduler_.start() 
Example #3
Source File: scheduler.py    From bazarr with GNU General Public License v3.0
def __init__(self):
        self.__running_tasks = []

        self.aps_scheduler = BackgroundScheduler()

        # task listener
        def task_listener_add(event):
            if event.job_id not in self.__running_tasks:
                self.__running_tasks.append(event.job_id)

        def task_listener_remove(event):
            if event.job_id in self.__running_tasks:
                self.__running_tasks.remove(event.job_id)

        self.aps_scheduler.add_listener(task_listener_add, EVENT_JOB_SUBMITTED)
        self.aps_scheduler.add_listener(task_listener_remove, EVENT_JOB_EXECUTED | EVENT_JOB_ERROR)

        # configure all tasks
        self.__sonarr_update_task()
        self.__radarr_update_task()
        self.__cache_cleanup_task()
        self.update_configurable_tasks()

        self.aps_scheduler.start() 
Example #4
Source File: competitions.py    From superCodingBot with MIT License
def __init__(self, clist_user_name, clist_api_key, mount_point, bot, fallback):
        self.clist_user_name = clist_user_name
        self.clist_api_key = clist_api_key
        self.bot = bot
        self.ong = None
        self.upc = None
        self.mount_point = mount_point
        self.utility = ContestUtility(mount_point)
        self.jobstores = {
            'default': SQLAlchemyJobStore(url='sqlite:///' + mount_point + 'coders1.db')
        }
        self.schedule = BackgroundScheduler(jobstores=self.jobstores)
        self.schedule.start()
        self.conv_handler = ConversationHandler(
            entry_points=[CommandHandler('upcoming', self.upcoming)],
            allow_reentry=True,
            states={
                SCHED: [CallbackQueryHandler(self.remind, pattern=r"^[0-9]*$")]
            },
            fallbacks=[fallback]
        )
        self.conv_handler1 = ConversationHandler(
            entry_points=[CommandHandler('dontRemindMe', self.removeRemind)],
            allow_reentry=True,
            states={
                REMNOTI: [CallbackQueryHandler(self.remnoti, pattern=r'^.*notiplz.*$')]
            },

            fallbacks=[fallback]
        ) 
Example #5
Source File: monitor.py    From loggrove with MIT License
def main():
    logging.info('Loggrove-Monitor running...')
    print('Loggrove-Monitor running...')
    make_logfile_thread = threading.Thread(target=make_logfiles, daemon=True)  # fetch host log files and monitoring items every 60s
    make_logfile_thread.start()
    send_alert_thread = threading.Thread(target=send_alert, daemon=True)       # consume the alert queue and send alerts
    send_alert_thread.start()
    monitor_report_thread = threading.Thread(target=monitor_report, daemon=True)    # consume the log statistics queue and report to loggrove
    monitor_report_thread.start()

    scheduler = BackgroundScheduler()
    scheduler.add_job(monitor_basic, 'cron', minute='*/1', max_instances=8)  # monitoring, statistics and checks
    scheduler.start()

    while send_alert_thread.is_alive() and make_logfile_thread.is_alive() and monitor_report_thread.is_alive():
        time.sleep(5)
    logging.info('Loggrove-Monitor exit...')
    print('Loggrove-Monitor exit...') 
Example #6
Source File: run_sources.py    From darklight with Apache License 2.0
def main():
    """Main method for running all sources."""
    scheduler = BackgroundScheduler()
    scheduler.start()

    Log.i("{} source(s) detected!".format(len(sources.__all__)))

    job_id = 1

    for source in sources.__all__:
        status = run(source)  # initial run of the source.

        if status:
            # register a job to run the source periodically (only for active sources)
            scheduler.add_job(run, "interval",
                              minutes=source().cycle, id=str(job_id),
                              args=(source, ))
            Log.i("Successfully added a new job")

            job_id += 1

    try:
        while True:
            time.sleep(60)  # sleep one minute so the scheduler keeps running normally
    except (KeyboardInterrupt, SystemExit):
        scheduler.shutdown()
Example #7
Source File: main.py    From Yugioh-bot with MIT License
def setup_runtime(uconfig):
    from bot.duel_links_runtime import DuelLinkRunTime
    from bot import logger
    from bot.providers import get_provider
    os.makedirs(uconfig.get('locations', 'log'), exist_ok=True)
    setup_logging()
    scheduler = BackgroundScheduler()
    dlRuntime = DuelLinkRunTime(uconfig, scheduler)
    dlRuntime.stop = False  # Need to Ensure that it runs
    scheduler.start()
    try:
        dlRuntime.set_provider(get_provider(uconfig.get('bot', 'provider'))(scheduler, uconfig, dlRuntime))
    except Exception as e:
        logger.critical("Could not get a provider, take a look at your config file")
        logger.critical(e)
        logger.debug(traceback.format_exc())
        sys.exit(1)
    try:
        dlRuntime.get_provider().sleep_factor = uconfig.getint('bot', 'sleep_factor')
    except Exception as e:
        logger.critical("Could not set sleep factor, take a look at your config file")
        logger.critical(e)
        sys.exit(1)
    return dlRuntime 
Example #8
Source File: notifier.py    From jarvis with GNU General Public License v2.0
def __init__(self, profile):
        self._logger = logging.getLogger(__name__)
        self.q = Queue.Queue()
        self.profile = profile
        self.notifiers = []

        if 'gmail_address' in profile and 'gmail_password' in profile:
            self.notifiers.append(self.NotificationClient(
                self.handle_email_notifications, None))
        else:
            self._logger.warning('gmail_address or gmail_password not set ' +
                                 'in profile, Gmail notifier will not be used')

        sched = BackgroundScheduler(timezone="UTC", daemon=True)
        sched.start()
        sched.add_job(self.gather, 'interval', seconds=30)
        atexit.register(lambda: sched.shutdown(wait=False)) 
Example #9
Source File: scheduler_manager.py    From piclodio3 with MIT License
def __init__(self):
        print("Initialisation of the scheduler manager")

        self.scheduler = BackgroundScheduler()
        # create the async loop in the main thread
        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self.loop)  # bind event loop to current thread
        asyncio.get_child_watcher().attach_loop(self.loop) 
Example #10
Source File: scheduler.py    From AutoYtB with The Unlicense
def __init__():
    global g_main_scheduler
    g_main_scheduler = BackgroundScheduler(timezone=utc)
    g_main_scheduler.add_jobstore('sqlalchemy', url='sqlite:///jobs.sqlite')
    g_main_scheduler.start()

    log_jobs()

    import threading
    myLogger("g_main_scheduler in this thread:{}".format(threading.current_thread())) 
Example #11
Source File: schedule.py    From mlcomp with Apache License 2.0
def start_schedule(jobs):
    scheduler = BackgroundScheduler()
    for func, interval in jobs:
        scheduler.add_job(func=func, trigger='interval', seconds=interval,
                          max_instances=1)
    scheduler.start()

    # Shut down the scheduler when exiting the app
    atexit.register(lambda: scheduler.shutdown()) 
Example #12
Source File: scheduling.py    From slack-machine with MIT License
def __init__(self):
        _settings, _ = import_settings()
        self._scheduler = BackgroundScheduler()
        if 'REDIS_URL' in _settings:
            redis_config = gen_config_dict(_settings)
            self._scheduler.add_jobstore('redis', **redis_config) 
Example #13
Source File: extensions.py    From saltshaker_api with MIT License
def _main_loop(self):
        wait_seconds = TIMEOUT_MAX
        while self.state != STATE_STOPPED:
            self._event.wait(wait_seconds)
            self._event.clear()
            wait_seconds = self._process_jobs()


# Override BackgroundScheduler's start method so that it calls MutexBlockingScheduler's _main_loop method
# The subclass must inherit from MutexBlockingScheduler
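The two comments above only describe the intended subclass; a rough sketch of how it could look follows, assuming apscheduler 3.x (TIMEOUT_MAX from apscheduler.util, STATE_STOPPED from apscheduler.schedulers.base). The class name MutexBackgroundScheduler and the thread-lock guard are illustrative assumptions, not the project's actual code.

import threading

from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.schedulers.blocking import BlockingScheduler
from apscheduler.schedulers.base import STATE_STOPPED
from apscheduler.util import TIMEOUT_MAX

class MutexBlockingScheduler(BlockingScheduler):
    # BlockingScheduler whose main loop serializes job processing behind a lock
    # (a stand-in for whatever mutual exclusion the project actually uses).
    _mutex = threading.Lock()

    def _main_loop(self):
        wait_seconds = TIMEOUT_MAX
        while self.state != STATE_STOPPED:
            self._event.wait(wait_seconds)
            self._event.clear()
            with self._mutex:
                wait_seconds = self._process_jobs()

class MutexBackgroundScheduler(MutexBlockingScheduler, BackgroundScheduler):
    # BackgroundScheduler.start() spawns a thread that runs self._main_loop();
    # with MutexBlockingScheduler first in the MRO, that thread executes the
    # lock-guarded loop above, so no explicit start() override is needed here.
    pass

# usage: scheduler = MutexBackgroundScheduler(); scheduler.start()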
Example #14
Source File: main.py    From EverydayWechat with MIT License
def init_alarm(alarm_dict):
    """
    初始化定时任务
    :param alarm_dict: 定时相关内容
    """
    # 定时任务
    scheduler = BackgroundScheduler()
    for key, value in alarm_dict.items():
        scheduler.add_job(send_alarm_msg, 'cron', [key], hour=value['hour'],
                          minute=value['minute'], id=key, misfire_grace_time=600, jitter=value.get("alarm_jitter",0))
    scheduler.start()

    # print('Scheduled reminder sending is enabled...')
    # print(scheduler.get_jobs()) 
Example #15
Source File: hipposched.py    From Hippocampe with GNU Affero General Public License v3.0
def main(request):
	logger.info('hipposched.main launched')
	try:
		jobId = 'shadowbook'
		time = request['time']
		logger.info('schedule submitted as: ' + str(time))
		#processing time to isolate each field
		tabTime = time.split(' ')
		#tabTime
		#	[0] min
		#	[1] hour
		#	[2] day of month
		#	[3] month
		#	[4] day of week
		sched = BackgroundScheduler()
		#always erase the previous schedule
		#because of replace_existing = True
		#logger.info('creating job')
		sched.add_job(shadowbook.hipposchedVersion,
			'cron',
			minute = tabTime[0],
			hour = tabTime[1],
			day = tabTime[2],
			month = tabTime[3],
			day_of_week = tabTime[4],
			replace_existing = True,
			id = jobId)
		sched.start()
		logger.info('job successfully scheduled as: ' + str(time))
		report = dict()
		report['schedule'] = time
		logger.info('hipposched.main end')
		return report
	except Exception as e:
		logger.error('hipposched.main failed, no idea where it came from', exc_info = True)
		report = dict()
		report['error'] = str(e)
		return report 
Example #16
Source File: __init__.py    From Tautulli with GNU General Public License v3.0
def start():
    global _STARTED

    if _INITIALIZED:
        global SCHED
        SCHED = BackgroundScheduler(timezone=pytz.UTC)
        activity_handler.ACTIVITY_SCHED = BackgroundScheduler(timezone=pytz.UTC)
        newsletter_handler.NEWSLETTER_SCHED = BackgroundScheduler(timezone=pytz.UTC)

        # Start the scheduler for stale stream callbacks
        activity_handler.ACTIVITY_SCHED.start()

        # Start background notification thread
        notification_handler.start_threads(num_threads=CONFIG.NOTIFICATION_THREADS)
        notifiers.check_browser_enabled()

        if CONFIG.FIRST_RUN_COMPLETE:
            activity_pinger.connect_server(log=True, startup=True)

        if CONFIG.SYSTEM_ANALYTICS:
            global TRACKER
            TRACKER = initialize_tracker()

            # Send system analytics events
            if not CONFIG.FIRST_RUN_COMPLETE:
                analytics_event(category='system', action='install')

            elif _UPDATE:
                analytics_event(category='system', action='update')

            analytics_event(category='system', action='start')

        # Schedule newsletters
        newsletter_handler.NEWSLETTER_SCHED.start()
        newsletter_handler.schedule_newsletters()

        _STARTED = True 
Example #17
Source File: test_steam_.py    From Yugioh-bot with MIT License
def setUp(self):
        os.environ['LOG_CFG'] = r'D:\Sync\OneDrive\Yu-gi-oh_bot\config.ini'
        scheduler = BackgroundScheduler()
        dlRuntime = DuelLinkRunTime(default_config(r'D:\Sync\OneDrive\Yu-gi-oh_bot'), scheduler, False)
        self.provider = Steam(scheduler, default_config(r'D:\Sync\OneDrive\Yu-gi-oh_bot'), dlRuntime, False)
        self.provider.sleep_factor = 0.0
        self.loop = asyncio.get_event_loop()
        self.loop.set_default_executor(ThreadPoolExecutor(2))
        dlRuntime._loop = self.loop
        self.provider.is_process_running() 
Example #18
Source File: scheduler.py    From flask-apscheduler with Apache License 2.0
def __init__(self, scheduler=None, app=None):
        self._scheduler = scheduler or BackgroundScheduler()
        self._host_name = socket.gethostname().lower()
        self._authentication_callback = None

        self.allowed_hosts = ['*']
        self.auth = None
        self.api_enabled = False
        self.api_prefix = '/scheduler'
        self.endpoint_prefix = 'scheduler.'
        self.app = None

        if app:
            self.init_app(app) 
Example #19
Source File: query_logger.py    From gateway-workflows with Apache License 2.0
def register_job(self):
        succeed = False
        try:
            interval = self.get_value('poll', 'interval')
            edge_api = EdgeAPI(self.get_value('edge', 'url'), debug=True)
            edge_api.set_token(self.get_value('edge', 'token'))
            poller = self._create_poller()
            if self._scheduler is None:
                self._scheduler = BackgroundScheduler(daemon=True, timezone=pytz.utc)
                self._scheduler.start()

            if self._job is not None:
                self._job.remove()
                self._job = None

            if self._edge_api is not None:
                self._edge_api = None

            if self._poller is not None:
                self._poller = None

            if interval is not None and 0 < interval:
                self._edge_api = edge_api
                self._poller = poller
                self._job = self._scheduler.add_job(self.process_logs, 'interval', seconds=interval)
                succeed = True

        except Exception as e:
            if self._debug:
                print('DEBUG: Exception <%s>' % str(e))
        return succeed
#
# The following code should be executed when this module is loaded.
#
Example #20
Source File: fwrl_updater.py    From gateway-workflows with Apache License 2.0
def register_synchronize_job(self):
        succeed = False
        try:
            interval = self.get_value('execution_interval')
            edge_api = EdgeAPI(self.get_value('edge_url'), debug=True)
            meraki_api = MerakiAPI(self.get_value('sdwan_key'), debug=True)
            if not edge_api.validate_edgeurl() or \
                not meraki_api.validate_api_key():
                return succeed

            if self._scheduler is None:
                self._scheduler = BackgroundScheduler(daemon=True, timezone=pytz.utc)
                self._scheduler.start()

            if self._job is not None:
                self._job.remove()
                self._job = None

            if interval is not None and 0 < interval:
                self._job = \
                    self._scheduler.add_job(self.synchronize_domainlists, 'interval', seconds=interval)
                succeed = True

        except Exception as e:
            if self._debug:
                print('DEBUG: Exception <%s>' % str(e))
        return succeed
#
# The following code should be executed when this module is loaded.
#
Example #21
Source File: sp_watcher.py    From gateway-workflows with Apache License 2.0
def register_job(self):
        succeed = False
        try:
            edge_api = EdgeAPI(self.get_value('edge_url'), debug=False)
            if not edge_api.validate_edgeurl():
                return succeed

            if self._scheduler is None:
                self._scheduler = BackgroundScheduler(daemon=True, timezone=pytz.utc)
                self._scheduler.start()

            if self._job is not None:
                self._job.remove()
                self._job = None

            interval = self.get_value('execution_interval')
            if interval is not None and 0 < interval:
                self.watch_service_points()
                self._job = \
                    self._scheduler.add_job(self.watch_service_points, 'interval', seconds=interval)
                succeed = True

        except Exception as e:
            if self._debug:
                print('DEBUG: Exception <%s>' % str(e))
        return succeed
#
# The following code should be executed when this module is loaded.
#
Example #22
Source File: scheduler.py    From eNMS with GNU General Public License v3.0
def configure_scheduler(self):
        self.scheduler = BackgroundScheduler(self.settings["config"])
        self.scheduler.start() 
Example #23
Source File: schedule.py    From pajbot with MIT License
def init():
        if not ScheduleManager.base_scheduler:
            ScheduleManager.base_scheduler = BackgroundScheduler(daemon=True)
            ScheduleManager.base_scheduler.start() 
Example #24
Source File: app.py    From robotreviewer with GNU General Public License v3.0
def initialize():
        log.info("Initializing clean-up task")
        scheduler = BackgroundScheduler()
        scheduler.start()
        scheduler.add_job(cleanup_database, 'interval', hours=12) 
Example #25
Source File: scheduler.py    From GovLens with MIT License
def scrape_websites(self):
        scheduler = BackgroundScheduler()
        scheduler.add_job(
            self.scheduled_method,
            "cron",
            day_of_week=self.job_trigger_settings["day_of_job"],
            hour=self.job_trigger_settings["hour"],
            minute=self.job_trigger_settings["minute"],
            second=self.job_trigger_settings["second"],
        )
        try:
            scheduler.start()
        except (KeyboardInterrupt, SystemExit):
            pass 
Example #26
Source File: prometheus_ceilometer.py    From galaxia with Apache License 2.0
def schedule_job(message):
    log.info("Received request to schedule the exporter job %s with job_id %s"
             % (message['exporter_name'], message['exporter_id']))
    metrics_list = message['metrics_list']

    for i in metrics_list:
        scheduler = BackgroundScheduler()
        scheduler.add_jobstore('sqlalchemy', url=CONF.gexporter.scheduler_db_url)
        job_id = message['exporter_id']
        scheduler.add_job(create_job, args=[i, message["time_interval"],
                                            job_id],
                          trigger='interval',
                          minutes=int(message["time_interval"]), id=job_id)
        try:
            print("Starting scheduler")
            scheduler.start()
        except Exception as ex:
            log.error("Scheduler raised an exception: %s" % str(ex))
        """
        try:
            while True:
                print "In infinite loop"
                time.sleep(2)
        except (KeyboardInterrupt, SystemExit):
            scheduler.shutdown()
        """ 
Example #27
Source File: app.py    From docker-prometheus-cloudflare-exporter with Apache License 2.0
def run():
    logging.info('Starting scrape service for zone "%s" using key [%s...]'
                 % (ZONE, AUTH_KEY[0:6]))

    update_latest()

    scheduler = BackgroundScheduler({'apscheduler.timezone': 'UTC'})
    scheduler.add_job(update_latest, 'interval', seconds=60)
    scheduler.start()

    try:
        app.run(host="0.0.0.0", port=SERVICE_PORT, threaded=True)
    finally:
        scheduler.shutdown() 
Example #28
Source File: scheduler.py    From StrategyEase-Python-SDK with MIT License
def __init__(self):
        self._logger = logging.getLogger()

        config_path = os.path.join(os.path.expanduser('~'), '.strategyease_sdk', 'config', 'scheduler.ini')
        self._logger.info('Config path: %s', config_path)
        self._config = configparser.RawConfigParser()
        self._config.readfp(codecs.open(config_path, encoding="utf_8_sig"))

        self._scheduler = BackgroundScheduler()
        self._client = Client(self._logger, **dict(self._config.items('StrategyEase'))) 
Example #29
Source File: produce.py    From sparrow with GNU General Public License v3.0
def __init__(self):
        self.run_date = datetime.datetime.now() + datetime.timedelta(seconds=3)
        self.run_date = self.run_date.strftime('%Y-%m-%d %H:%M:%S')
        self.tm = time.strftime('%Y%m%d%H%M%S',time.localtime())
        self.scheduler = BackgroundScheduler()
        self.executors = {'default': ThreadPoolExecutor(10), 'processpool': ProcessPoolExecutor(5)}
        self.job_defaults = {'coalesce': False, 'max_instances': 1}
        self.scheduler.configure(timezone=pytz.timezone('Asia/Shanghai'), job_defaults=self.job_defaults, executors=self.executors)
Example #30
Source File: produce.py    From sparrow with GNU General Public License v3.0
def __init__(self):
        self.run_date = datetime.datetime.now() + datetime.timedelta(seconds=3)
        self.run_date = self.run_date.strftime('%Y-%m-%d %H:%M:%S')
        self.tt = time.strftime('%Y%m%d%H%M', time.localtime())
        self.scheduler = BackgroundScheduler()
        self.executors = {'default': ThreadPoolExecutor(20), 'processpool': ProcessPoolExecutor(5)}
        self.job_defaults = {'coalesce': False, 'max_instances': 3}
        self.scheduler.configure(timezone=pytz.timezone('Asia/Shanghai'), job_defaults=self.job_defaults, executors=self.executors)