Python multiprocessing.current_process() Examples
The following are 30 code examples of multiprocessing.current_process(), drawn from open source projects. The original project, source file, and license for each example are noted above it. You may also want to check out the other available functions and classes of the multiprocessing module.
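As a quick orientation before the project examples, here is a minimal, self-contained sketch (not taken from any of the projects below) of what multiprocessing.current_process() returns: the Process object for the calling process, whose name, pid, and daemon attributes identify it.

import multiprocessing

if __name__ == '__main__':
    proc = multiprocessing.current_process()
    # In the top-level script this is the "MainProcess" object.
    print(proc.name)    # MainProcess
    print(proc.pid)     # same value os.getpid() would report
    print(proc.daemon)  # False for the main process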
Example #1
Source File: unittest_checker_typecheck.py From python-netsurv with MIT License | 6 votes |
def test_staticmethod_multiprocessing_call(self):
    """Make sure not-callable isn't raised for descriptors

    astroid can't process descriptors correctly so
    pylint needs to ignore not-callable for them right now

    Test for https://github.com/PyCQA/pylint/issues/1699
    """
    call = astroid.extract_node(
        """
        import multiprocessing
        multiprocessing.current_process() #@
        """
    )
    with self.assertNoMessages():
        self.checker.visit_call(call)
Example #2
Source File: managers.py From BinderFilter with MIT License | 6 votes |
def serve_forever(self):
    '''
    Run the server forever
    '''
    current_process()._manager_server = self
    try:
        try:
            while 1:
                try:
                    c = self.listener.accept()
                except (OSError, IOError):
                    continue
                t = threading.Thread(target=self.handle_request, args=(c,))
                t.daemon = True
                t.start()
        except (KeyboardInterrupt, SystemExit):
            pass
    finally:
        self.stop = 999
        self.listener.close()
Example #3
Source File: host_utils.py From mysql_utils with GNU General Public License v2.0 | 6 votes |
def check_dict_of_procs(proc_dict):
    """ Check a dict of process for exit, error, etc...

    Args:
    A dict of processes

    Returns: True if all processes have completed with return status 0
             False is some processes are still running
             An exception is generated if any processes have completed with a
             returns status other than 0
    """
    success = True
    for proc in proc_dict:
        ret = proc_dict[proc].poll()
        if ret is None:
            # process has not yet terminated
            success = False
        elif ret != 0:
            raise Exception('{proc_id}: {proc} encountered an error'
                            ''.format(
                                proc_id=multiprocessing.current_process().name,
                                proc=proc))
    return success
Example #4
Source File: mysql_backup_csv.py From mysql_utils with GNU General Public License v2.0 | 6 votes |
def upload_schema(self, db, table, tmp_dir_db):
    """ Upload the schema of a table to s3

    Args:
    db - the db to be backed up
    table - the table to be backed up
    tmp_dir_db - temporary storage used for all tables in the db
    """
    (schema_path, _, _) = backup.get_csv_backup_paths(
        self.instance, db, table, self.datestamp)
    create_stm = mysql_lib.show_create_table(self.instance, db, table)
    log.debug('{proc_id}: Uploading schema to {schema_path}'
              ''.format(schema_path=schema_path,
                        proc_id=multiprocessing.current_process().name))
    boto_conn = boto.connect_s3()
    bucket = boto_conn.get_bucket(self.upload_bucket, validate=False)
    key = bucket.new_key(schema_path)
    key.set_contents_from_string(create_stm)
Example #5
Source File: mysql_backup_csv.py From mysql_utils with GNU General Public License v2.0 | 6 votes |
def upload_pitr_data(self, db, tbl, pitr_data):
    """ Upload a file of PITR data to s3 for each table

    Args:
    db - the db that was backed up.
    tbl - the table that was backed up.
    pitr_data - a dict of various data that might be helpful for running a
                PITR
    """
    zk = host_utils.MysqlZookeeper()
    replica_set = zk.get_replica_set_from_instance(self.instance)
    s3_path = PATH_PITR_DATA.format(replica_set=replica_set,
                                    date=self.datestamp,
                                    db_name=db, table=tbl)
    log.debug('{proc_id}: {db}.{tbl} Uploading pitr data to {s3_path}'
              ''.format(s3_path=s3_path,
                        proc_id=multiprocessing.current_process().name,
                        db=db, tbl=tbl))
    boto_conn = boto.connect_s3()
    bucket = boto_conn.get_bucket(self.upload_bucket, validate=False)
    key = bucket.new_key(s3_path)
    key.set_contents_from_string(json.dumps(pitr_data))
Example #6
Source File: ipcontrollerapp.py From Computable with MIT License | 6 votes |
def launch_new_instance(*args, **kwargs):
    """Create and run the IPython controller"""
    if sys.platform == 'win32':
        # make sure we don't get called from a multiprocessing subprocess
        # this can result in infinite Controllers being started on Windows
        # which doesn't have a proper fork, so multiprocessing is wonky

        # this only comes up when IPython has been installed using vanilla
        # setuptools, and *not* distribute.
        import multiprocessing
        p = multiprocessing.current_process()
        # the main process has name 'MainProcess'
        # subprocesses will have names like 'Process-1'
        if p.name != 'MainProcess':
            # we are a subprocess, don't start another Controller!
            return
    return IPControllerApp.launch_instance(*args, **kwargs)
Example #7
Source File: mysql_backup_csv.py From mysql_utils with GNU General Public License v2.0 | 6 votes |
def cleanup_fifo(self, fifo):
    """ Safely cleanup a fifo that is an unknown state

    Args:
    fifo - The path to the fifo
    """
    log.debug('{proc_id}: Cleanup of {fifo} started'
              ''.format(proc_id=multiprocessing.current_process().name,
                        fifo=fifo))
    cat_proc = subprocess.Popen('timeout 5 cat {} >/dev/null'.format(fifo),
                                shell=True)
    cat_proc.wait()
    os.remove(fifo)
    log.debug('{proc_id}: Cleanup of {fifo} complete'
              ''.format(proc_id=multiprocessing.current_process().name,
                        fifo=fifo))
Example #8
Source File: reduction.py From BinderFilter with MIT License | 6 votes |
def _get_listener():
    global _listener

    if _listener is None:
        _lock.acquire()
        try:
            if _listener is None:
                debug('starting listener and thread for sending handles')
                _listener = Listener(authkey=current_process().authkey)
                t = threading.Thread(target=_serve)
                t.daemon = True
                t.start()
        finally:
            _lock.release()

    return _listener
Example #9
Source File: managers.py From BinderFilter with MIT License | 6 votes |
def RebuildProxy(func, token, serializer, kwds):
    '''
    Function used for unpickling proxy objects.

    If possible the shared object is returned, or otherwise a proxy for it.
    '''
    server = getattr(current_process(), '_manager_server', None)

    if server and server.address == token.address:
        return server.id_to_obj[token.id][0]
    else:
        incref = (
            kwds.pop('incref', True) and
            not getattr(current_process(), '_inheriting', False)
            )
        return func(token, serializer, incref=incref, **kwds)

#
# Functions to create proxies and proxy types
#
Example #10
Source File: multiprocessing_introduction_1.py From youtube_tutorials with GNU General Public License v3.0 | 6 votes |
def square(number):
    """The function squares whatever number it is provided."""
    result = number * number
    # We can use the OS module in Python to print out the process ID
    # assigned to the call of this function assigned by the operating
    # system.
    proc_id = os.getpid()
    print(f"Process ID: {proc_id}")

    # We can also use the "current_process" function to get the name
    # of the Process object:
    process_name = current_process().name
    print(f"Process Name: {process_name}")

    print(f"The number {number} squares to {result}.")
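The tutorial function above is only the worker side; the driver that launches it is not shown on this page. A minimal sketch of how such a worker is typically started in separate processes (the process count here is illustrative) might look like this:

import os
from multiprocessing import Process, current_process

def square(number):
    """Squares a number and reports which process did the work."""
    result = number * number
    print(f"Process ID: {os.getpid()}")
    print(f"Process Name: {current_process().name}")
    print(f"The number {number} squares to {result}.")

if __name__ == '__main__':
    processes = []
    for number in range(1, 5):
        # Each child gets an auto-generated name such as "Process-1".
        proc = Process(target=square, args=(number,))
        processes.append(proc)
        proc.start()
    for proc in processes:
        proc.join()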
Example #11
Source File: reduction.py From ironpython2 with Apache License 2.0 | 6 votes |
def _get_listener():
    global _listener

    if _listener is None:
        _lock.acquire()
        try:
            if _listener is None:
                debug('starting listener and thread for sending handles')
                _listener = Listener(authkey=current_process().authkey)
                t = threading.Thread(target=_serve)
                t.daemon = True
                t.start()
        finally:
            _lock.release()

    return _listener
Example #12
Source File: reduction.py From oss-ftp with MIT License | 6 votes |
def _get_listener():
    global _listener

    if _listener is None:
        _lock.acquire()
        try:
            if _listener is None:
                debug('starting listener and thread for sending handles')
                _listener = Listener(authkey=current_process().authkey)
                t = threading.Thread(target=_serve)
                t.daemon = True
                t.start()
        finally:
            _lock.release()

    return _listener
Example #13
Source File: managers.py From oss-ftp with MIT License | 6 votes |
def serve_forever(self):
    '''
    Run the server forever
    '''
    current_process()._manager_server = self
    try:
        try:
            while 1:
                try:
                    c = self.listener.accept()
                except (OSError, IOError):
                    continue
                t = threading.Thread(target=self.handle_request, args=(c,))
                t.daemon = True
                t.start()
        except (KeyboardInterrupt, SystemExit):
            pass
    finally:
        self.stop = 999
        self.listener.close()
Example #14
Source File: unittest_checker_typecheck.py From python-netsurv with MIT License | 6 votes |
def test_staticmethod_multiprocessing_call(self):
    """Make sure not-callable isn't raised for descriptors

    astroid can't process descriptors correctly so
    pylint needs to ignore not-callable for them right now

    Test for https://github.com/PyCQA/pylint/issues/1699
    """
    call = astroid.extract_node(
        """
        import multiprocessing
        multiprocessing.current_process() #@
        """
    )
    with self.assertNoMessages():
        self.checker.visit_call(call)
Example #15
Source File: hmdb51.py From gluon-cv with Apache License 2.0 | 6 votes |
def run_warp_optical_flow(vid_item, dev_id=0):
    full_path, vid_path, vid_id = vid_item
    vid_name = vid_path.split('.')[0]
    out_full_path = osp.join(args.out_dir, vid_name)
    try:
        os.mkdir(out_full_path)
    except OSError:
        pass

    current = current_process()
    dev_id = (int(current._identity[0]) - 1) % args.num_gpu
    flow_x_path = '{}/flow_x'.format(out_full_path)
    flow_y_path = '{}/flow_y'.format(out_full_path)

    cmd = osp.join(args.df_path + 'build/extract_warp_gpu') + \
        ' -f={} -x={} -y={} -b=20 -t=1 -d={} -s=1 -o={}'.format(
            quote(full_path), quote(flow_x_path), quote(flow_y_path),
            dev_id, args.out_format)

    os.system(cmd)
    print('warp on {} {} done'.format(vid_id, vid_name))
    sys.stdout.flush()
    return True
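The dev_id computation above relies on current_process()._identity, an undocumented attribute that numbers pool workers 1, 2, 3, and so on. A stripped-down sketch of that GPU-assignment pattern (worker and GPU counts invented for illustration) follows:

from multiprocessing import Pool, current_process

NUM_GPUS = 4  # illustrative; the gluon-cv scripts read this from args.num_gpu

def process_video(vid_path):
    # Pool workers carry an internal _identity tuple such as (1,), (2,), ...
    # Taking it modulo the GPU count spreads work round-robin across devices.
    worker_id = int(current_process()._identity[0])
    dev_id = (worker_id - 1) % NUM_GPUS
    print('worker {} handles {} on GPU {}'.format(worker_id, vid_path, dev_id))

if __name__ == '__main__':
    with Pool(processes=8) as pool:
        pool.map(process_video, ['a.mp4', 'b.mp4', 'c.mp4'])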
Example #16
Source File: managers.py From ironpython2 with Apache License 2.0 | 6 votes |
def RebuildProxy(func, token, serializer, kwds):
    '''
    Function used for unpickling proxy objects.

    If possible the shared object is returned, or otherwise a proxy for it.
    '''
    server = getattr(current_process(), '_manager_server', None)

    if server and server.address == token.address:
        return server.id_to_obj[token.id][0]
    else:
        incref = (
            kwds.pop('incref', True) and
            not getattr(current_process(), '_inheriting', False)
            )
        return func(token, serializer, incref=incref, **kwds)

#
# Functions to create proxies and proxy types
#
Example #17
Source File: db_engine.py From ReadableWebProxy with BSD 3-Clause "New" or "Revised" License | 6 votes |
def delete_db_session(postfix="", flask_sess_if_possible=True): if flags.IS_FLASK and flask_sess_if_possible: # No need to do anything with flask sess return cpid = multiprocessing.current_process().name ctid = threading.current_thread().name csid = "{}-{}-{}".format(cpid, ctid, postfix) # print("Releasing session for thread: %s" % csid) # print(traceback.print_stack()) # print("==========================") if csid in SESSIONS: with SESSION_LOCK: # check if the session was created while # we were waiting for the lock if not csid in SESSIONS: return SESSIONS[csid][1].close() del SESSIONS[csid] # print("Deleted session for id: ", csid)
Example #18
Source File: managers.py From oss-ftp with MIT License | 6 votes |
def RebuildProxy(func, token, serializer, kwds):
    '''
    Function used for unpickling proxy objects.

    If possible the shared object is returned, or otherwise a proxy for it.
    '''
    server = getattr(current_process(), '_manager_server', None)

    if server and server.address == token.address:
        return server.id_to_obj[token.id][0]
    else:
        incref = (
            kwds.pop('incref', True) and
            not getattr(current_process(), '_inheriting', False)
            )
        return func(token, serializer, incref=incref, **kwds)

#
# Functions to create proxies and proxy types
#
Example #19
Source File: kinetics400.py From gluon-cv with Apache License 2.0 | 6 votes |
def run_warp_optical_flow(vid_item, dev_id=0):
    full_path, vid_path, vid_id = vid_item
    vid_name = vid_path.split('.')[0]
    out_full_path = osp.join(args.out_dir, vid_name)
    try:
        os.mkdir(out_full_path)
    except OSError:
        pass

    current = current_process()
    dev_id = (int(current._identity[0]) - 1) % args.num_gpu
    flow_x_path = '{}/flow_x'.format(out_full_path)
    flow_y_path = '{}/flow_y'.format(out_full_path)

    cmd = osp.join(args.df_path + 'build/extract_warp_gpu') + \
        ' -f={} -x={} -y={} -b=20 -t=1 -d={} -s=1 -o={}'.format(
            quote(full_path), quote(flow_x_path), quote(flow_y_path),
            dev_id, args.out_format)

    os.system(cmd)
    print('warp on {} {} done'.format(vid_id, vid_name))
    sys.stdout.flush()
    return True
Example #20
Source File: db_engine.py From ReadableWebProxy with BSD 3-Clause "New" or "Revised" License | 6 votes |
def get_engine():
    cpid = multiprocessing.current_process().name
    ctid = threading.current_thread().name
    csid = "{}-{}".format(cpid, ctid)

    if not csid in ENGINES:
        with ENGINE_LOCK:
            # Check if the engine was created while we were
            # waiting on the lock.
            if csid in ENGINES:
                return ENGINES[csid]

            log.info("INFO: Creating engine for process! Engine name: '%s'" % csid)
            ENGINES[csid] = create_engine(SQLALCHEMY_DATABASE_URI,
                                          isolation_level="READ COMMITTED")
                                          # isolation_level="REPEATABLE READ")

    return ENGINES[csid]
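The get_engine() example keys its cache on the current process name plus the current thread name, so each worker process and thread builds its own engine rather than sharing one across a fork. The same pattern, reduced to a generic sketch with an invented expensive_resource() stand-in for create_engine(), looks like this:

import multiprocessing
import threading

RESOURCES = {}
RESOURCE_LOCK = threading.Lock()

def expensive_resource(key):
    # Stand-in for something like create_engine(); one instance per worker.
    return object()

def get_cached_resource():
    # Key the cache on both the process and the thread name so no two
    # workers ever share the same underlying resource.
    csid = "{}-{}".format(multiprocessing.current_process().name,
                          threading.current_thread().name)
    if csid not in RESOURCES:
        with RESOURCE_LOCK:
            # Re-check: another thread may have created it while we waited.
            if csid not in RESOURCES:
                RESOURCES[csid] = expensive_resource(csid)
    return RESOURCES[csid]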
Example #21
Source File: managers.py From ironpython2 with Apache License 2.0 | 6 votes |
def serve_forever(self):
    '''
    Run the server forever
    '''
    current_process()._manager_server = self
    try:
        try:
            while 1:
                try:
                    c = self.listener.accept()
                except (OSError, IOError):
                    continue
                t = threading.Thread(target=self.handle_request, args=(c,))
                t.daemon = True
                t.start()
        except (KeyboardInterrupt, SystemExit):
            pass
    finally:
        self.stop = 999
        self.listener.close()
Example #22
Source File: managers.py From BinderFilter with MIT License | 5 votes |
def __init__(self, address=None, authkey=None, serializer='pickle'):
    if authkey is None:
        authkey = current_process().authkey
    self._address = address     # XXX not final address if eg ('', 0)
    self._authkey = AuthenticationString(authkey)
    self._state = State()
    self._state.value = State.INITIAL
    self._serializer = serializer
    self._Listener, self._Client = listener_client[serializer]
Example #23
Source File: shared.py From botogram with MIT License | 5 votes |
def _command(self, command, arg):
    """Send a command"""
    ipc = multiprocessing.current_process().ipc
    return ipc.command(command, arg)
Example #24
Source File: kinetics400.py From gluon-cv with Apache License 2.0 | 5 votes |
def run_optical_flow(vid_item, dev_id=0):
    full_path, vid_path, vid_id = vid_item
    vid_name = vid_path.split('.')[0]
    out_full_path = osp.join(args.out_dir, vid_name)
    try:
        os.mkdir(out_full_path)
    except OSError:
        pass

    current = current_process()
    dev_id = (int(current._identity[0]) - 1) % args.num_gpu
    image_path = '{}/img'.format(out_full_path)
    flow_x_path = '{}/flow_x'.format(out_full_path)
    flow_y_path = '{}/flow_y'.format(out_full_path)

    cmd = osp.join(args.df_path, 'build/extract_gpu') + \
        ' -f={} -x={} -y={} -i={} -b=20 -t=1 -d={} -s=1 -o={} -w={} -h={}' \
        .format(
            quote(full_path), quote(flow_x_path), quote(flow_y_path),
            quote(image_path), dev_id, args.out_format,
            args.new_width, args.new_height)

    os.system(cmd)
    print('{} {} done'.format(vid_id, vid_name))
    sys.stdout.flush()
    return True
Example #25
Source File: process_handler.py From clusterfuzz with Apache License 2.0 | 5 votes |
def get_process():
    """Return a multiprocessing process object (with bug fixes)."""
    if environment.is_trusted_host():
        # forking/multiprocessing is unsupported because of the RPC connection.
        return threading.Thread

    # FIXME(unassigned): Remove this hack after real bug is fixed.
    # pylint: disable=protected-access
    multiprocessing.current_process()._identity = ()
    return multiprocessing.Process
Example #26
Source File: bonjour_process.py From Archive-SE with Apache License 2.0 | 5 votes |
def execute_function(func, args):
    result = func(args)
    return '%s says that %s %s = %s' % \
        (current_process().name, func.__name__, args, result)

#
# Functions referenced by tasks
#
Example #27
Source File: hmdb51.py From gluon-cv with Apache License 2.0 | 5 votes |
def run_optical_flow(vid_item, dev_id=0):
    full_path, vid_path, vid_id = vid_item
    vid_name = vid_path.split('.')[0]
    out_full_path = osp.join(args.out_dir, vid_name)
    try:
        os.mkdir(out_full_path)
    except OSError:
        pass

    current = current_process()
    dev_id = (int(current._identity[0]) - 1) % args.num_gpu
    image_path = '{}/img'.format(out_full_path)
    flow_x_path = '{}/flow_x'.format(out_full_path)
    flow_y_path = '{}/flow_y'.format(out_full_path)

    cmd = osp.join(args.df_path, 'build/extract_gpu') + \
        ' -f={} -x={} -y={} -i={} -b=20 -t=1 -d={} -s=1 -o={} -w={} -h={}' \
        .format(
            quote(full_path), quote(flow_x_path), quote(flow_y_path),
            quote(image_path), dev_id, args.out_format,
            args.new_width, args.new_height)

    os.system(cmd)
    print('{} {} done'.format(vid_id, vid_name))
    sys.stdout.flush()
    return True
Example #28
Source File: reduction.py From BinderFilter with MIT License | 5 votes |
def rebuild_handle(pickled_data):
    address, handle, inherited = pickled_data
    if inherited:
        return handle
    sub_debug('rebuilding handle %d', handle)
    conn = Client(address, authkey=current_process().authkey)
    conn.send((handle, os.getpid()))
    new_handle = recv_handle(conn)
    conn.close()
    return new_handle

#
# Register `_multiprocessing.Connection` with `ForkingPickler`
#
Example #29
Source File: ucf101.py From gluon-cv with Apache License 2.0 | 5 votes |
def run_optical_flow(vid_item, dev_id=0):
    full_path, vid_path, vid_id = vid_item
    vid_name = vid_path.split('.')[0]
    out_full_path = osp.join(args.out_dir, vid_name)
    try:
        os.mkdir(out_full_path)
    except OSError:
        pass

    current = current_process()
    dev_id = (int(current._identity[0]) - 1) % args.num_gpu
    image_path = '{}/img'.format(out_full_path)
    flow_x_path = '{}/flow_x'.format(out_full_path)
    flow_y_path = '{}/flow_y'.format(out_full_path)

    cmd = osp.join(args.df_path, 'build/extract_gpu') + \
        ' -f={} -x={} -y={} -i={} -b=20 -t=1 -d={} -s=1 -o={} -w={} -h={}' \
        .format(
            quote(full_path), quote(flow_x_path), quote(flow_y_path),
            quote(image_path), dev_id, args.out_format,
            args.new_width, args.new_height)

    os.system(cmd)
    print('{} {} done'.format(vid_id, vid_name))
    sys.stdout.flush()
    return True
Example #30
Source File: managers.py From BinderFilter with MIT License | 5 votes |
def AutoProxy(token, serializer, manager=None, authkey=None,
              exposed=None, incref=True):
    '''
    Return an auto-proxy for `token`
    '''
    _Client = listener_client[serializer][1]

    if exposed is None:
        conn = _Client(token.address, authkey=authkey)
        try:
            exposed = dispatch(conn, None, 'get_methods', (token,))
        finally:
            conn.close()

    if authkey is None and manager is not None:
        authkey = manager._authkey
    if authkey is None:
        authkey = current_process().authkey

    ProxyType = MakeProxyType('AutoProxy[%s]' % token.typeid, exposed)
    proxy = ProxyType(token, serializer, manager=manager, authkey=authkey,
                      incref=incref)
    proxy._isauto = True
    return proxy

#
# Types/callables which we will register with SyncManager
#