Python gevent.subprocess Examples

The following are 5 code examples of the gevent.subprocess module. You can go to the original project or source file by following the attribution above each example. You may also want to check out the other available functions and classes of the gevent module.
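
Before the recipes-py examples, here is a minimal, self-contained sketch (not taken from any project) of what gevent.subprocess provides: a cooperative drop-in for the standard subprocess module, so waiting on a child process yields to other greenlets instead of blocking the whole program. The echo command is purely illustrative.

import gevent
from gevent import subprocess

def run(cmd):
  # communicate() yields to the gevent hub while the child runs, so other
  # greenlets keep making progress in the meantime.
  proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
  out, _ = proc.communicate()
  return proc.returncode, out

if __name__ == '__main__':
  jobs = [gevent.spawn(run, ['echo', str(i)]) for i in range(3)]
  gevent.joinall(jobs)
  for job in jobs:
    print(job.value)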
Example #1
Source File: subproc.py    From recipes-py with Apache License 2.0
def _reap_workers(workers, to_close, debug_log):
  """Collects the IO workers created with _mk_workers.

  After killing the workers, also closes the subprocess's open PIPE handles.

  See _safe_close for caveats around closing handles on Windows.

  Args:
    * workers (List[Greenlet]) - The IO workers to kill.
    * to_close (List[...]) - (see _mk_workers for definition). The handles to
      close. These originate from the `Popen.std{out,err}` handles when the
      recipe engine had to use PIPEs.
    * debug_log (..stream.StreamEngine.Stream)

  Should not raise an exception.
  """
  debug_log.write_line('reaping IO workers...')
  for worker in workers:
    worker.kill()
  gevent.wait(workers)
  debug_log.write_line('  done')
  for handle_name, handle in to_close:
    _safe_close(debug_log, handle_name, handle)
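
The kill-then-wait pattern above (Greenlet.kill() on each worker, then gevent.wait() on all of them) is worth seeing in isolation. The following standalone sketch is not from recipes-py; the worker function and its sleep loop are purely illustrative stand-ins for the IO workers:

import gevent

def worker(name):
  try:
    while True:
      gevent.sleep(1)  # stands in for a blocking read from a subprocess PIPE
  except gevent.GreenletExit:
    # kill() raises GreenletExit inside the worker, giving it a chance to
    # exit cleanly.
    print('%s: exiting cleanly' % name)
    raise

workers = [gevent.spawn(worker, 'w%d' % i) for i in range(2)]
gevent.sleep(0)       # let the workers start running
for w in workers:
  w.kill()            # raise GreenletExit in each worker
gevent.wait(workers)  # make sure every worker has finished before moving on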
Example #2
Source File: subproc.py    From recipes-py with Apache License 2.0
def _mk_workers(step, proc, pipes):
  """Makes greenlets to shuttle lines from the process's PIPE'd std{out,err}
  handles to the recipe Step's std{out,err} handles.

  NOTE: This applies to @@@annotator@@@ runs when allow_subannotations=False;
  Step.std{out,err} will be Stream objects which don't implement `fileno()`,
  but add an '!' in front of all lines starting with '@@@'. In build.proto
  mode this code path should NOT be active at all; Placeholders will be
  redirected directly to files on disk and non-placeholders will go straight
  to butler (i.e. regular file handles).

  Args:

    * step (..step_runner.Step) - The Step object describing what we're
      supposed to run.
    * proc (subprocess.Popen) - The running subprocess.
    * pipes (Set[str]) - A subset of {'stdout', 'stderr'} to make worker
      greenlets for.

  Returns Tuple[
    workers: List[Greenlet],
    to_close: List[Tuple[
      handle_name: str,
      proc_handle: fileobj,
    ]]
  ]. Both returned values are expected to be passed directly to
  `_reap_workers` without inspection or alteration.
  """
  workers = []
  to_close = []
  for handle_name in pipes:
    proc_handle = getattr(proc, handle_name)
    to_close.append((handle_name, proc_handle))
    workers.append(gevent.spawn(
        _copy_lines, proc_handle, getattr(step, handle_name),
    ))
  return workers, to_close
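
_copy_lines is referenced above but not included in this excerpt. As a hedged guess at its shape (an assumption, not the actual recipes-py implementation), it would shuttle lines from the child's pipe handle to the Step's stream-like handle until EOF, roughly like this:

def _copy_lines(proc_handle, step_handle):
  # Hypothetical sketch: read lines until the pipe reaches EOF and forward
  # each one to the Step's stream-like handle via write_line (the same
  # interface used for debug_log in these examples).
  try:
    for line in iter(proc_handle.readline, b''):
      step_handle.write_line(line.rstrip(b'\r\n').decode('utf-8', 'replace'))
  except (IOError, OSError):
    pass  # the pipe can already be gone when the process exits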
Example #3
Source File: subproc.py    From recipes-py with Apache License 2.0
def _fd_for_out(raw_val):
  if hasattr(raw_val, 'fileno'):
    return raw_val.fileno()
  if isinstance(raw_val, str):
    return open(raw_val, 'wb')
  return subprocess.PIPE 
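
A hedged usage note for _fd_for_out: whatever it returns (an inherited file descriptor, a freshly opened file object, or subprocess.PIPE) can be passed directly to Popen's stdout/stderr arguments. The call below is illustrative, not the actual recipes-py call site, and assumes `subprocess` is gevent's subprocess module as in the rest of these examples:

from gevent import subprocess

proc = subprocess.Popen(
    ['echo', 'hello'],
    stdout=_fd_for_out('out.txt'),  # a str path -> freshly opened file object
    stderr=_fd_for_out(None),       # no fileno() and not a str -> PIPE
)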
Example #4
Source File: subproc.py    From recipes-py with Apache License 2.0
def _wait_proc(proc, gid, timeout, debug_log):
  """Waits for the completion (or timeout) of `proc`.

  Args:

    * proc (subprocess.Popen) - The actual running subprocess to wait for.
    * gid (int|None) - The group ID of the process.
    * timeout (Number|None) - The number of seconds to wait for the process to
      end (or None for no timeout).
    * debug_log (..stream.StreamEngine.Stream)

  Returns the ExecutionResult.

  Should not raise an exception.
  """
  ret = ExecutionResult()

  # We're about to do gevent-blocking operations (waiting on the subprocess)
  # and so another greenlet could kill us; we guard all of these operations
  # with a `try/except GreenletExit` to handle this and return an
  # ExecutionResult(was_cancelled=True) in that case.
  #
  # The engine will raise a new GreenletExit exception after processing this
  # step.
  try:
    # TODO(iannucci): This API changes in python3 to raise an exception on
    # timeout.
    proc.wait(timeout)
    ret = attr.evolve(ret, retcode=proc.poll())
    debug_log.write_line(
        'finished waiting for process, retcode %r' % ret.retcode)

    # TODO(iannucci): Make leaking subprocesses explicit (e.g. goma compiler
    # daemon). Better, change daemons to be owned by a gevent Greenlet (so that
    # we don't need to leak processes ever).
    #
    # _kill(proc, gid)  # In case of leaked subprocesses or timeout.
    if ret.retcode is None:
      debug_log.write_line('timeout! killing process group %r' % gid)
      # Process timed out, kill it. Currently all uses of non-None timeout
      # intend to actually kill the subprocess when the timeout pops.
      ret = attr.evolve(ret, retcode=_kill(proc, gid), had_timeout=True)

  except gevent.GreenletExit:
    debug_log.write_line(
        'caught GreenletExit, killing process group %r' % (gid,))
    ret = attr.evolve(ret, retcode=_kill(proc, gid), was_cancelled=True)

  return ret
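
_wait_proc builds its return value with attr.evolve, so ExecutionResult is an attrs class defined elsewhere in recipes-py. Based only on the fields used in this excerpt (retcode, had_timeout, was_cancelled), a hedged sketch of its shape might look like the following; the real class may well carry more fields:

import attr

@attr.s(frozen=True)
class ExecutionResult(object):
  retcode = attr.ib(default=None)         # exit code, None until known
  had_timeout = attr.ib(default=False)    # True if the timeout fired
  was_cancelled = attr.ib(default=False)  # True if another greenlet killed us

Making the class frozen would also explain why the code uses attr.evolve to produce updated copies rather than mutating ret in place.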
Example #5
Source File: subproc.py    From recipes-py with Apache License 2.0
def _safe_close(debug_log, handle_name, handle):
  """Safely attempt to close the given handle.

  Args:

    * debug_log (Stream) - Stream to write debug information to about closing
      this handle.
    * handle_name (str) - The name of the handle (like 'stdout', 'stderr')
    * handle (file-like-object) - The file object to call .close() on.

  NOTE: On Windows this may end up leaking threads for processes which spawn
  'daemon' children that hang onto the handles we pass. In this case debug_log
  is updated with as much detail as we know and the gevent threadpool's maxsize
  is increased by 2 (one thread blocked on reading from the handle, and one
  thread blocked on trying to close the handle).
  """
  try:
    debug_log.write_line('closing handle %r' % handle_name)
    with gevent.Timeout(.1):
      handle.close()
    debug_log.write_line('  closed!')

  except gevent.Timeout:
    # This should never happen... except on Windows when the process we launched
    # itself leaked.
    debug_log.write_line('  LEAKED: timeout closing handle')
    # We assume we've now leaked 2 threads; one is blocked on 'read' and the
    # other is blocked on 'close'. Add two more threads to the pool so we do not
    # globally block the recipe engine on subsequent steps.
    gevent.get_hub().threadpool.maxsize += 2

  except IOError as ex:
    # TODO(iannucci): Currently this leaks handles on Windows for processes like
    # the goma compiler proxy; because of python2.7's inability to set
    # close_fds=True and also redirect std handles, daemonized subprocesses
    # actually inherit our handles (yuck).
    #
    # This is fixable on python3, but not likely to be fixable on python 2.
    debug_log.write_line('  LEAKED: unable to close: %r' % (ex,))
    # We assume we've now leaked 2 threads; one is blocked on 'read' and the
    # other is blocked on 'close'. Add two more threads to the pool so we do not
    # globally block the recipe engine on subsequent steps.
    gevent.get_hub().threadpool.maxsize += 2

  except RuntimeError:
    # NOTE(gevent): This can happen as a race between the worker greenlet and
    # the process ending. See gevent.subprocess.Popen.communicate, which does
    # the same thing.
    debug_log.write_line('  LEAKED?: race with IO worker')
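
The gevent.Timeout pattern used in _safe_close is handy on its own. A standalone illustration (not from recipes-py) follows; note that the timeout can only interrupt operations that yield to the gevent hub, which the pipe objects created by gevent.subprocess do:

import gevent

def close_with_deadline(handle, seconds=0.1):
  # Returns True if the handle closed within the deadline, False if the
  # close blocked long enough for gevent.Timeout to fire.
  try:
    with gevent.Timeout(seconds):
      handle.close()
    return True
  except gevent.Timeout:
    return False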