Python multiprocessing.get_all_start_methods() Examples
The following are 14 code examples of multiprocessing.get_all_start_methods().
Each example is taken from an open-source project; the source file, project name, and license are noted above each snippet.
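Before the project examples, here is a minimal sketch of the call itself; the printed list is illustrative and depends on the platform and Python version:

import multiprocessing

# List the start methods supported on this platform.
methods = multiprocessing.get_all_start_methods()
print(methods)
# Typically ['fork', 'spawn', 'forkserver'] on Linux and ['spawn'] on Windows.

# A pattern that recurs in the examples below: prefer 'forkserver' when it is
# available, fall back to 'spawn', and build a context from the chosen method.
start_method = 'forkserver' if 'forkserver' in methods else 'spawn'
ctx = multiprocessing.get_context(start_method)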
Example #1
Source File: test_vec_envs.py From stable-baselines with MIT License | 6 votes |
def test_subproc_start_method():
    start_methods = [None]
    # Only test thread-safe methods. Others may deadlock tests! (gh/428)
    safe_methods = {'forkserver', 'spawn'}
    available_methods = multiprocessing.get_all_start_methods()
    start_methods += list(safe_methods.intersection(available_methods))
    space = gym.spaces.Discrete(2)

    def obs_assert(obs):
        return check_vecenv_obs(obs, space)

    for start_method in start_methods:
        vec_env_class = functools.partial(SubprocVecEnv, start_method=start_method)
        check_vecenv_spaces(vec_env_class, space, obs_assert)

    with pytest.raises(ValueError, match="cannot find context for 'illegal_method'"):
        vec_env_class = functools.partial(SubprocVecEnv, start_method='illegal_method')
        check_vecenv_spaces(vec_env_class, space, obs_assert)
Example #2
Source File: configuration.py From airflow with Apache License 2.0 | 6 votes |
def _validate_config_dependencies(self):
    """
    Validate that config values aren't invalid given other config values
    or system-level limitations and requirements.
    """
    if (
            self.get("core", "executor") not in ('DebugExecutor', 'SequentialExecutor') and
            "sqlite" in self.get('core', 'sql_alchemy_conn')):
        raise AirflowConfigException(
            "error: cannot use sqlite with the {}".format(
                self.get('core', 'executor')))

    if self.has_option('core', 'mp_start_method'):
        mp_start_method = self.get('core', 'mp_start_method')
        start_method_options = multiprocessing.get_all_start_methods()

        if mp_start_method not in start_method_options:
            raise AirflowConfigException(
                "mp_start_method should not be " + mp_start_method +
                ". Possible values are " + ", ".join(start_method_options))
Example #3
Source File: face_detection_cli.py From face_recognition with MIT License | 6 votes |
def process_images_in_process_pool(images_to_check, number_of_cpus, model):
    if number_of_cpus == -1:
        processes = None
    else:
        processes = number_of_cpus

    # macOS will crash due to a bug in libdispatch if you don't use 'forkserver'
    context = multiprocessing
    if "forkserver" in multiprocessing.get_all_start_methods():
        context = multiprocessing.get_context("forkserver")

    pool = context.Pool(processes=processes)

    function_parameters = zip(
        images_to_check,
        itertools.repeat(model),
    )

    pool.starmap(test_image, function_parameters)
Example #4
Source File: face_recognition_cli.py From face_recognition with MIT License | 6 votes |
def process_images_in_process_pool(images_to_check, known_names, known_face_encodings, number_of_cpus, tolerance, show_distance):
    if number_of_cpus == -1:
        processes = None
    else:
        processes = number_of_cpus

    # macOS will crash due to a bug in libdispatch if you don't use 'forkserver'
    context = multiprocessing
    if "forkserver" in multiprocessing.get_all_start_methods():
        context = multiprocessing.get_context("forkserver")

    pool = context.Pool(processes=processes)

    function_parameters = zip(
        images_to_check,
        itertools.repeat(known_names),
        itertools.repeat(known_face_encodings),
        itertools.repeat(tolerance),
        itertools.repeat(show_distance)
    )

    pool.starmap(test_image, function_parameters)
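Stripped of the face-recognition specifics, Examples #3 and #4 share one pattern: pick a 'forkserver' context when the platform offers it, then build a Pool from it. A minimal sketch of that pattern follows; process_item() and its arguments are hypothetical stand-ins for test_image() above.

import itertools
import multiprocessing

def process_item(item, model):
    # Hypothetical worker; stands in for test_image() in the examples above.
    return (item, model)

def run_in_pool(items, model, processes=None):
    # Prefer a 'forkserver' context when the platform supports it (the examples
    # above do this to avoid a macOS libdispatch crash); otherwise fall back to
    # the top-level multiprocessing module.
    context = multiprocessing
    if "forkserver" in multiprocessing.get_all_start_methods():
        context = multiprocessing.get_context("forkserver")

    with context.Pool(processes=processes) as pool:
        return pool.starmap(process_item, zip(items, itertools.repeat(model)))

if __name__ == "__main__":
    print(run_in_pool(["a.jpg", "b.jpg"], model="hog"))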
Example #5
Source File: subproc_vec_env.py From stable-baselines with MIT License | 5 votes |
def __init__(self, env_fns, start_method=None):
    self.waiting = False
    self.closed = False
    n_envs = len(env_fns)

    if start_method is None:
        # Fork is not a thread safe method (see issue #217)
        # but is more user friendly (does not require to wrap the code in
        # a `if __name__ == "__main__":`)
        forkserver_available = 'forkserver' in multiprocessing.get_all_start_methods()
        start_method = 'forkserver' if forkserver_available else 'spawn'
    ctx = multiprocessing.get_context(start_method)

    self.remotes, self.work_remotes = zip(*[ctx.Pipe(duplex=True) for _ in range(n_envs)])
    self.processes = []
    for work_remote, remote, env_fn in zip(self.work_remotes, self.remotes, env_fns):
        args = (work_remote, remote, CloudpickleWrapper(env_fn))
        # daemon=True: if the main process crashes, we should not cause things to hang
        process = ctx.Process(target=_worker, args=args, daemon=True)  # pytype:disable=attribute-error
        process.start()
        self.processes.append(process)
        work_remote.close()

    self.remotes[0].send(('get_spaces', None))
    observation_space, action_space = self.remotes[0].recv()
    VecEnv.__init__(self, len(env_fns), observation_space, action_space)
Example #6
Source File: test_concurrency.py From coveragepy-bbmirror with Apache License 2.0 | 5 votes |
def try_multiprocessing_code(
    self, code, expected_out, the_module, concurrency="multiprocessing"
):
    """Run code using multiprocessing, it should produce `expected_out`."""
    self.make_file("multi.py", code)
    self.make_file(".coveragerc", """\
        [run]
        concurrency = %s
        source = .
        """ % concurrency)

    if env.PYVERSION >= (3, 4):
        start_methods = ['fork', 'spawn']
    else:
        start_methods = ['']

    for start_method in start_methods:
        if start_method and start_method not in multiprocessing.get_all_start_methods():
            continue

        out = self.run_command("coverage run multi.py %s" % (start_method,))
        expected_cant_trace = cant_trace_msg(concurrency, the_module)

        if expected_cant_trace is not None:
            self.assertEqual(out, expected_cant_trace)
        else:
            self.assertEqual(out.rstrip(), expected_out)

            out = self.run_command("coverage combine")
            self.assertEqual(out, "")
            out = self.run_command("coverage report -m")

            last_line = self.squeezed_lines(out)[-1]
            self.assertRegex(last_line, r"multi.py \d+ 0 100%")
Example #7
Source File: test_concurrency.py From coveragepy-bbmirror with Apache License 2.0 | 5 votes |
def try_multiprocessing_code_with_branching(self, code, expected_out):
    """Run code using multiprocessing, it should produce `expected_out`."""
    self.make_file("multi.py", code)
    self.make_file("multi.rc", """\
        [run]
        concurrency = multiprocessing
        branch = True
        """)

    if env.PYVERSION >= (3, 4):
        start_methods = ['fork', 'spawn']
    else:
        start_methods = ['']

    for start_method in start_methods:
        if start_method and start_method not in multiprocessing.get_all_start_methods():
            continue

        out = self.run_command("coverage run --rcfile=multi.rc multi.py %s" % (start_method,))
        self.assertEqual(out.rstrip(), expected_out)

        out = self.run_command("coverage combine")
        self.assertEqual(out, "")
        out = self.run_command("coverage report -m")

        last_line = self.squeezed_lines(out)[-1]
        self.assertRegex(last_line, r"multi.py \d+ 0 \d+ 0 100%")
Example #8
Source File: _test_multiprocessing.py From Fluid-Designer with GNU General Public License v3.0 | 5 votes |
def test_get_all(self):
    methods = multiprocessing.get_all_start_methods()
    if sys.platform == 'win32':
        self.assertEqual(methods, ['spawn'])
    else:
        self.assertTrue(
            methods == ['fork', 'spawn'] or
            methods == ['fork', 'spawn', 'forkserver'])

#
# Check that killing process does not leak named semaphores
#
Example #9
Source File: _test_multiprocessing.py From ironpython3 with Apache License 2.0 | 5 votes |
def test_get_all(self):
    methods = multiprocessing.get_all_start_methods()
    if sys.platform == 'win32':
        self.assertEqual(methods, ['spawn'])
    else:
        self.assertTrue(
            methods == ['fork', 'spawn'] or
            methods == ['fork', 'spawn', 'forkserver'])

#
# Check that killing process does not leak named semaphores
#
Example #10
Source File: multiprocessing.py From kitty with GNU General Public License v3.0 | 5 votes |
def get_process_pool_executor(
    prefer_fork: bool = False,
    max_workers: Optional[int] = None,
    initializer: Optional[Callable] = None,
    initargs: Tuple[Any, ...] = ()
) -> ProcessPoolExecutor:
    if prefer_fork and 'fork' in get_all_start_methods():
        ctx: Union[context.DefaultContext, context.ForkContext] = get_context('fork')
    else:
        monkey_patch_multiprocessing()
        ctx = get_context()
    try:
        return ProcessPoolExecutor(max_workers=max_workers, initializer=initializer, initargs=initargs, mp_context=ctx)
    except TypeError:
        return ProcessPoolExecutor(max_workers=max_workers, initializer=initializer, initargs=initargs)
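The mp_context keyword used above is part of the standard concurrent.futures API (accepted by ProcessPoolExecutor since Python 3.7); the try/except TypeError in the kitty code is only a fallback for older interpreters. Below is a minimal sketch of the same idea without the kitty-specific helpers; square() is a hypothetical worker.

from concurrent.futures import ProcessPoolExecutor
from multiprocessing import get_all_start_methods, get_context

def square(x):
    # Hypothetical worker function.
    return x * x

def make_executor(prefer_fork=False, max_workers=None):
    # Mirror the check above: only request a 'fork' context when the platform
    # actually offers it; otherwise use the default context for this platform.
    if prefer_fork and 'fork' in get_all_start_methods():
        ctx = get_context('fork')
    else:
        ctx = get_context()
    return ProcessPoolExecutor(max_workers=max_workers, mp_context=ctx)

if __name__ == "__main__":
    with make_executor() as executor:
        print(list(executor.map(square, range(4))))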
Example #11
Source File: test_concurrency.py From coveragepy with Apache License 2.0 | 5 votes |
def try_multiprocessing_code(
    self, code, expected_out, the_module, nprocs, concurrency="multiprocessing", args=""
):
    """Run code using multiprocessing, it should produce `expected_out`."""
    self.make_file("multi.py", code)
    self.make_file(".coveragerc", """\
        [run]
        concurrency = %s
        source = .
        """ % concurrency)

    if env.PYVERSION >= (3, 4):
        start_methods = ['fork', 'spawn']
    else:
        start_methods = ['']

    for start_method in start_methods:
        if start_method and start_method not in multiprocessing.get_all_start_methods():
            continue

        remove_files(".coverage", ".coverage.*")
        cmd = "coverage run {args} multi.py {start_method}".format(
            args=args, start_method=start_method,
        )
        out = self.run_command(cmd)
        expected_cant_trace = cant_trace_msg(concurrency, the_module)

        if expected_cant_trace is not None:
            self.assertEqual(out, expected_cant_trace)
        else:
            self.assertEqual(out.rstrip(), expected_out)
            self.assertEqual(len(glob.glob(".coverage.*")), nprocs + 1)

            out = self.run_command("coverage combine")
            self.assertEqual(out, "")
            out = self.run_command("coverage report -m")

            last_line = self.squeezed_lines(out)[-1]
            self.assertRegex(last_line, r"multi.py \d+ 0 100%")
Example #12
Source File: test_concurrency.py From coveragepy with Apache License 2.0 | 5 votes |
def try_multiprocessing_code_with_branching(self, code, expected_out):
    """Run code using multiprocessing, it should produce `expected_out`."""
    self.make_file("multi.py", code)
    self.make_file("multi.rc", """\
        [run]
        concurrency = multiprocessing
        branch = True
        omit = */site-packages/*
        """)

    if env.PYVERSION >= (3, 4):
        start_methods = ['fork', 'spawn']
    else:
        start_methods = ['']

    for start_method in start_methods:
        if start_method and start_method not in multiprocessing.get_all_start_methods():
            continue

        out = self.run_command("coverage run --rcfile=multi.rc multi.py %s" % (start_method,))
        self.assertEqual(out.rstrip(), expected_out)

        out = self.run_command("coverage combine")
        self.assertEqual(out, "")
        out = self.run_command("coverage report -m")

        last_line = self.squeezed_lines(out)[-1]
        self.assertRegex(last_line, r"multi.py \d+ 0 \d+ 0 100%")
Example #13
Source File: _test_multiprocessing.py From Project-New-Reign---Nemesis-Main with GNU General Public License v3.0 | 5 votes |
def test_get_all(self):
    methods = multiprocessing.get_all_start_methods()
    if sys.platform == 'win32':
        self.assertEqual(methods, ['spawn'])
    else:
        self.assertTrue(
            methods == ['fork', 'spawn'] or
            methods == ['fork', 'spawn', 'forkserver'])
Example #14
Source File: multiprocCommon.py From Thespian with MIT License | 5 votes |
def get_multiproc_context(capabilities):
    best_concurrency = capabilities.get('Process Startup Method', 'fork')
    if hasattr(multiprocessing, 'get_context'):
        for each in (best_concurrency, 'fork', 'spawn'):
            if hasattr(multiprocessing, 'get_all_start_methods'):
                if each in multiprocessing.get_all_start_methods():
                    return multiprocessing.get_context(each)
            else:
                try:
                    return multiprocessing.get_context(each)
                except ValueError:
                    pass  # invalid concurrency for this system
    return None