Python daemon.pidfile() Examples

The following are 7 code examples of daemon.pidfile(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions and classes of the module daemon, or try the search function.
Example #1
Source File: queue_monitor.py    From EnergyPATHWAYS with MIT License 8 votes vote down vote up
def start_queue_monitor(poll_frequency, max_workers, user, group, directory):
    """Daemonize and run a QueueMonitor under the given user/group.

    Resolves *user* (required) and *group* (optional) to numeric ids, then
    drops into a daemon context that runs the monitor loop.

    :param poll_frequency: passed through to QueueMonitor (polling interval;
        units not visible here -- presumably seconds, TODO confirm).
    :param max_workers: passed through to QueueMonitor.
    :param user: login name to run as; must exist (pwd.getpwnam raises KeyError
        otherwise).
    :param group: group name to run as, or None to use the user's primary group.
    :param directory: working directory for the daemonized process.
    """
    pw = pwd.getpwnam(user)
    # Fall back to the user's primary group when no explicit group is given.
    gid = pw.pw_gid if group is None else grp.getgrnam(group).gr_gid
    # This will capture stderr from this process as well as all child
    # energyPATHWAYS processes. Normally it will be empty, but it can
    # help capture model startup problems that would otherwise be hard to see.
    # NOTE: opened BEFORE daemonizing so the descriptor survives the fork;
    # DaemonContext keeps it open because it is passed as stderr=.
    err = open('/var/log/queue_monitor/qm_stderr_%s.log' % start_time, 'w+')

    with daemon.DaemonContext(
        # Keep the existing log handler's fd open across the daemon fork,
        # which closes all other inherited descriptors.
        files_preserve=[logging.root.handlers[0].stream.fileno()],
        pidfile=daemon.pidfile.PIDLockFile('/var/run/queue_monitor/queue_monitor.pid'),
        uid=pw.pw_uid,
        gid=gid,
        working_directory=directory,
        stderr=err
    ):
        logger.info('My process id is %i' % os.getpid())
        qm = QueueMonitor(poll_frequency, max_workers)
        qm.start() 
Example #2
Source File: kerberos_command.py    From airflow with Apache License 2.0 6 votes vote down vote up
def kerberos(args):
    """Start a kerberos ticket renewer"""
    print(settings.HEADER)

    if args.daemon:
        pid, stdout, stderr, _ = setup_locations(
            "kerberos", args.pid, args.stdout, args.stderr, args.log_file
        )
        # Open the redirect targets with context managers so they are closed
        # even when krb.run() raises (the previous explicit close() calls were
        # skipped on exception, leaking both handles). This also matches the
        # style used by the flower daemon command.
        with open(stdout, 'w+') as stdout_handle, open(stderr, 'w+') as stderr_handle:
            ctx = daemon.DaemonContext(
                pidfile=TimeoutPIDLockFile(pid, -1),
                stdout=stdout_handle,
                stderr=stderr_handle,
            )

            with ctx:
                krb.run(principal=args.principal, keytab=args.keytab)
    else:
        krb.run(principal=args.principal, keytab=args.keytab) 
Example #3
Source File: endpoint.py    From funcX with Apache License 2.0 5 votes vote down vote up
def stop_endpoint(name: str = typer.Argument("default", autocompletion=complete_endpoint_name)):
    """ Stops an endpoint using the pidfile

    Sends SIGTERM, then SIGKILL, to the pid recorded in the endpoint's
    daemon.pid file. Removes the pidfile and exits non-zero if the process
    cannot be signalled at all.
    """

    endpoint_dir = os.path.join(State.FUNCX_DIR, name)
    pid_file = os.path.join(endpoint_dir, "daemon.pid")

    if os.path.exists(pid_file):
        logger.debug(f"{name} has a daemon.pid file")
        pid = None
        with open(pid_file, 'r') as f:
            pid = int(f.read())
        # Attempt terminating
        try:
            logger.debug("Signalling process: {}".format(pid))
            os.kill(pid, signal.SIGTERM)
            time.sleep(0.1)
            # The daemon may already have exited after SIGTERM; in that case
            # SIGKILL raises ProcessLookupError, which previously fell through
            # to the OSError handler and reported a *successful* stop as a
            # failure (with sys.exit(-1)). Swallow only that specific case.
            try:
                os.kill(pid, signal.SIGKILL)
            except ProcessLookupError:
                pass
            time.sleep(0.1)
            # Wait to confirm that the pid file disappears
            if not os.path.exists(pid_file):
                logger.info("Endpoint <{}> is now stopped".format(name))

        except OSError:
            logger.warning("Endpoint {} could not be terminated".format(name))
            logger.warning("Attempting Endpoint {} cleanup".format(name))
            os.remove(pid_file)
            sys.exit(-1)
    else:
        logger.info("Endpoint <{}> is not active.".format(name)) 
Example #4
Source File: scheduler_command.py    From airflow with Apache License 2.0 5 votes vote down vote up
def scheduler(args):
    """Starts Airflow Scheduler"""
    print(settings.HEADER)
    job = SchedulerJob(
        dag_id=args.dag_id,
        subdir=process_subdir(args.subdir),
        num_runs=args.num_runs,
        do_pickle=args.do_pickle)

    if args.daemon:
        pid, stdout, stderr, log_file = setup_locations("scheduler",
                                                        args.pid,
                                                        args.stdout,
                                                        args.stderr,
                                                        args.log_file)
        handle = setup_logging(log_file)
        # Open the redirect targets with context managers so they are closed
        # even when job.run() raises (the previous explicit close() calls were
        # skipped on exception, leaking both handles). This also matches the
        # style used by the flower daemon command.
        with open(stdout, 'w+') as stdout_handle, open(stderr, 'w+') as stderr_handle:
            ctx = daemon.DaemonContext(
                pidfile=TimeoutPIDLockFile(pid, -1),
                # Keep the log handler's descriptor open across the daemon
                # fork, which closes all other inherited descriptors.
                files_preserve=[handle],
                stdout=stdout_handle,
                stderr=stderr_handle,
            )
            with ctx:
                job.run()
    else:
        # Foreground mode: install graceful/diagnostic signal handlers.
        signal.signal(signal.SIGINT, sigint_handler)
        signal.signal(signal.SIGTERM, sigint_handler)
        signal.signal(signal.SIGQUIT, sigquit_handler)
        job.run() 
Example #5
Source File: celery_command.py    From airflow with Apache License 2.0 5 votes vote down vote up
def flower(args):
    """Starts Flower, Celery monitoring tool"""
    options = [
        conf.get('celery', 'BROKER_URL'),
        f"--address={args.hostname}",
        f"--port={args.port}",
    ]

    # Optional flags: append each one only when its CLI argument was given.
    for value, flag in (
        (args.broker_api, f"--broker-api={args.broker_api}"),
        (args.url_prefix, f"--url-prefix={args.url_prefix}"),
        (args.basic_auth, f"--basic-auth={args.basic_auth}"),
        (args.flower_conf, f"--conf={args.flower_conf}"),
    ):
        if value:
            options.append(flag)

    flower_cmd = FlowerCommand()

    # Foreground mode: just run flower and return.
    if not args.daemon:
        flower_cmd.execute_from_commandline(argv=options)
        return

    # Daemon mode: resolve pid/stdout/stderr paths, redirect output, and run
    # inside a detached daemon context.
    pidfile, stdout, stderr, _ = setup_locations(
        process="flower",
        pid=args.pid,
        stdout=args.stdout,
        stderr=args.stderr,
        log=args.log_file,
    )
    with open(stdout, "w+") as out_handle, open(stderr, "w+") as err_handle:
        daemon_ctx = daemon.DaemonContext(
            pidfile=TimeoutPIDLockFile(pidfile, -1),
            stdout=out_handle,
            stderr=err_handle,
        )
        with daemon_ctx:
            flower_cmd.execute_from_commandline(argv=options)
Example #6
Source File: needl.py    From Needl with MIT License 5 votes vote down vote up
def main():
    """Parse command-line arguments, initialize needl, and run it.

    Runs as a detached daemon when -d/--daemon is given, otherwise runs in
    the foreground.
    """
    parser = argparse.ArgumentParser(description=needl.__description__)
    parser.add_argument('--datadir', default=os.getcwd() + '/data', help='Data directory')
    # Fixed "deamon" -> "daemon" typo in the user-facing help text.
    parser.add_argument('-d', '--daemon', action='store_true', help='Run as a daemon')
    parser.add_argument('-v', '--verbose', action='store_true', help='Increase logging')
    parser.add_argument('--logfile', type=argparse.FileType('a'), default=sys.stdout, help='Log to this file. Default is stdout.')
    parser.add_argument('--pidfile', default='/tmp/needl.pid', help='Save process PID to this file. Default is /tmp/needl.pid. Only valid when running as a daemon.')
    args = parser.parse_args()

    # A daemon cannot log to stdout (it is detached from the terminal), so
    # substitute a file when no explicit --logfile was given.
    if args.daemon and args.logfile is sys.stdout:
        args.logfile = open('/tmp/needl.log', 'a')

    needl.init(args)
    # Plain if/else instead of a conditional expression evaluated only for
    # its side effects.
    if args.daemon:
        daemonize(args.logfile, args.pidfile)
    else:
        start()
Example #7
Source File: needl.py    From Needl with MIT License 5 votes vote down vote up
def daemonize(logfile, pidfile):
    """Detach into a daemon and run start().

    :param logfile: open file object to receive the daemon's stderr.
    :param pidfile: path where the daemon's pid lock file is written.
    """
    needl.log.info('Daemonizing and logging to %s', logfile)

    # Dropped the unused "as dc" binding; the context manager alone handles
    # the fork and cleanup.
    with daemon.DaemonContext(working_directory=os.getcwd(),
                              stderr=logfile,
                              umask=0o002,
                              pidfile=daemon.pidfile.PIDLockFile(pidfile)):
        start()