Python joblib.parallel_backend() Examples
The following are 14 code examples of joblib.parallel_backend(), drawn from open-source projects. The source file, project, and license for each example are listed above its code. You may also want to check out all available functions and classes of the joblib module.
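Before the project-specific examples, here is a minimal sketch of the pattern they all share: joblib.parallel_backend() is a context manager that selects the backend (and, optionally, a default worker count) for every joblib.Parallel call made inside the with block.

import joblib
from joblib import Parallel, delayed

def square(x):
    return x * x

# Select the built-in threading backend with a default of 2 workers.
# Any Parallel call inside the block inherits this backend and n_jobs
# unless it specifies its own.
with joblib.parallel_backend("threading", n_jobs=2):
    results = Parallel()(delayed(square)(i) for i in range(8))

print(results)  # [0, 1, 4, 9, 16, 25, 36, 49]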
Example #1
Source File: test_parallel.py From civis-python with BSD 3-Clause "New" or "Revised" License
def test_template_submit(mock_file, mock_result, mock_pool):
    # Verify that creating child jobs from a template looks like we expect
    file_id = 17
    mock_client = create_client_mock()
    mock_file.return_value = file_id

    factory = civis.parallel.make_backend_template_factory(
        from_template_id=1234, client=mock_client)
    n_calls = 3
    register_parallel_backend('civis', factory)
    with parallel_backend('civis'):
        # NB: joblib >v0.11 relies on callbacks from the result object to
        # decide when it's done consuming inputs. We've mocked the result
        # object here, so Parallel must be called either with n_jobs=1 or
        # pre_dispatch='all' to consume the inputs all at once.
        parallel = Parallel(n_jobs=1, pre_dispatch='n_jobs')
        parallel(delayed(sqrt)(i ** 2) for i in range(n_calls))

    assert mock_file.call_count == 3, "Upload 3 functions to run"
    assert mock_pool().submit.call_count == n_calls, "Run 3 functions"
    for this_call in mock_pool().submit.call_args_list:
        assert this_call == mock.call(JOBLIB_FUNC_FILE_ID=file_id)
    assert mock_result.call_count == 3, "Create 3 results"
Example #2
Source File: test_parallel.py From civis-python with BSD 3-Clause "New" or "Revised" License
def test_civis_backend_pickles(mock_civis):
    # Test to make sure the backend will pickle.
    backend = civis.parallel._CivisBackend(
        setup_cmd='blah',
        from_template_id=-1,
        max_submit_retries=10,
        client='ha',
        remote_backend='cool',
        hidden=False)

    with parallel_backend(backend):
        Parallel(n_jobs=-1)([])

    buff = io.BytesIO()
    pickle.dump(backend, buff)
    buff.seek(0)
    new_backend = pickle.load(buff)

    with parallel_backend(new_backend):
        Parallel(n_jobs=-1)([])
Example #3
Source File: predator_prey_dmt.py From PsyNeuLink with Apache License 2.0
def run_search():
    from dask.distributed import Client, LocalCluster
    import joblib
    import hypertunity as ht

    # client = Client(scheduler_file='scheduler.json')
    client = Client()
    print(client)

    domain = ht.Domain({
        "cost_rate": set([-.8])
    })

    # with joblib.parallel_backend('dask'):
    #     with joblib.Parallel() as parallel:
    #         print("Doing the work ... ")
    #         results = parallel(joblib.delayed(run_games)(*domain.sample().as_namedtuple()) for s in range(1))
    #
    # print(results)

    run_games(-.8)
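The commented-out block above shows the intended Dask integration. As a standalone sketch of that pattern (assuming dask.distributed is installed; the "dask" backend becomes usable by joblib once a Client is running):

import joblib
from dask.distributed import Client

# Start a local Dask cluster; with a Client alive, joblib can dispatch
# work to the Dask workers via the "dask" backend.
client = Client()

with joblib.parallel_backend("dask"):
    # Each delayed call is executed on the Dask workers.
    results = joblib.Parallel()(joblib.delayed(pow)(i, 2) for i in range(10))

print(results)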
Example #4
Source File: test_threaded.py From pynndescent with BSD 2-Clause "Simplified" License
def test_effective_n_jobs_with_context():
    assert_equal(threaded.effective_n_jobs_with_context(), 1, "Default to 1 job")
    assert_equal(
        threaded.effective_n_jobs_with_context(-1),
        joblib.cpu_count(),
        "Use all cores with num_jobs=-1",
    )
    assert_equal(
        threaded.effective_n_jobs_with_context(2), 2, "Use n_jobs if specified"
    )
    with joblib.parallel_backend("threading"):
        assert_equal(
            threaded.effective_n_jobs_with_context(),
            joblib.cpu_count(),
            "Use all cores with context manager",
        )
    with joblib.parallel_backend("threading", n_jobs=3):
        assert_equal(
            threaded.effective_n_jobs_with_context(),
            3,
            "Use n_jobs from context manager",
        )
    with joblib.parallel_backend("threading", n_jobs=3):
        assert_equal(
            threaded.effective_n_jobs_with_context(2),
            2,
            "Use n_jobs specified rather than from context manager",
        )
Example #5
Source File: test_threaded.py From pynndescent with BSD 2-Clause "Simplified" License
def test_nn_decent_with_parallel_backend():
    np.random.seed(42)

    N = 100
    D = 128
    chunk_size = N // 8
    n_neighbors = 25
    data = np.random.rand(N, D).astype(np.float32)

    nn_indices, nn_distances = NNDescent(
        data,
        n_neighbors=n_neighbors,
        max_candidates=max_candidates,
        n_iters=1,
        random_state=42,
        delta=0,
        tree_init=False,
        seed_per_row=True,
    )._neighbor_graph

    with joblib.parallel_backend("threading"):
        nn_indices_threaded, nn_distances_threaded = NNDescent(
            data,
            n_neighbors=n_neighbors,
            max_candidates=max_candidates,
            n_iters=1,
            random_state=42,
            delta=0,
            tree_init=False,
            seed_per_row=True,
        )._neighbor_graph

    assert_allclose(nn_indices_threaded, nn_indices)
    assert_allclose(nn_distances_threaded, nn_distances)
Example #6
Source File: threaded.py From pynndescent with BSD 2-Clause "Simplified" License
def effective_n_jobs_with_context(n_jobs=None):
    """Find the effective number of jobs, either specified directly, or from
    the joblib.parallel_backend context."""
    if n_jobs is None:
        _, n_jobs_from_context = joblib.parallel.get_active_backend()
        n_jobs = n_jobs_from_context
    return joblib.effective_n_jobs(n_jobs)
Example #7
Source File: test_joblib.py From ray with Apache License 2.0
def test_ray_backend(shutdown_only):
    register_ray()
    from ray.util.joblib.ray_backend import RayBackend
    with joblib.parallel_backend("ray"):
        assert type(joblib.parallel.get_active_backend()[0]) == RayBackend
Example #8
Source File: test_joblib.py From ray with Apache License 2.0
def test_svm_single_node(shutdown_only):
    digits = load_digits()
    param_space = {
        "C": np.logspace(-6, 6, 10),
        "gamma": np.logspace(-8, 8, 10),
        "tol": np.logspace(-4, -1, 3),
        "class_weight": [None, "balanced"],
    }

    model = SVC(kernel="rbf")
    search = RandomizedSearchCV(model, param_space, cv=3, n_iter=2, verbose=10)
    register_ray()
    with joblib.parallel_backend("ray"):
        search.fit(digits.data, digits.target)
    assert ray.is_initialized()
Example #9
Source File: test_joblib.py From ray with Apache License 2.0
def test_svm_multiple_nodes(ray_start_cluster_2_nodes):
    digits = load_digits()
    param_space = {
        "C": np.logspace(-6, 6, 30),
        "gamma": np.logspace(-8, 8, 30),
        "tol": np.logspace(-4, -1, 30),
        "class_weight": [None, "balanced"],
    }

    model = SVC(kernel="rbf")
    search = RandomizedSearchCV(model, param_space, cv=5, n_iter=2, verbose=10)
    register_ray()
    with joblib.parallel_backend("ray"):
        search.fit(digits.data, digits.target)
    assert ray.is_initialized()
Example #10
Source File: test_joblib.py From ray with Apache License 2.0
def test_cross_validation(shutdown_only):
    register_ray()
    iris = load_iris()
    clf = SVC(kernel="linear", C=1, random_state=0)
    with joblib.parallel_backend("ray", n_jobs=5):
        accuracy = cross_val_score(clf, iris.data, iris.target, cv=5)
    assert len(accuracy) == 5
    for result in accuracy:
        assert result > 0.95
Example #11
Source File: fit.py From palladium with Apache License 2.0
def with_parallel_backend(
    estimator,
    backend,
    methods=('fit', 'predict', 'predict_proba'),
    **backend_params
):
    def wrapper(func):
        def wrapped(*args, **kwargs):
            with parallel_backend(backend, **backend_params):
                return func(*args, **kwargs)
        return wrapped

    for name in methods:
        setattr(estimator, name, wrapper(getattr(estimator, name)))
    return estimator
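For context, this helper wraps the named estimator methods so that each call runs inside a parallel_backend context. A hypothetical usage sketch follows (RandomForestClassifier is only a stand-in for any joblib-parallelized estimator, and it assumes the helper above plus joblib.parallel_backend are in scope):

from sklearn.datasets import load_iris
from sklearn.ensemble import RandomForestClassifier

X, y = load_iris(return_X_y=True)

# All joblib parallelism triggered inside fit/predict/predict_proba now
# runs on the threading backend with up to 4 workers, without touching
# the estimator's own code.
clf = with_parallel_backend(
    RandomForestClassifier(n_estimators=50),
    "threading",
    n_jobs=4,
)
clf.fit(X, y)
probabilities = clf.predict_proba(X)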
Example #12
Source File: test_study.py From optuna with MIT License
def test_optimize_parallel_storage_warning(recwarn):
    # type: (WarningsRecorder) -> None

    study = optuna.create_study()

    # Default joblib backend is threading and no warnings will be captured.
    study.optimize(lambda t: t.suggest_uniform("x", 0, 1), n_trials=20, n_jobs=2)
    assert len(recwarn) == 0

    with pytest.warns(UserWarning):
        with joblib.parallel_backend("loky"):
            study.optimize(lambda t: t.suggest_uniform("x", 0, 1), n_trials=20, n_jobs=2)
Example #13
Source File: test_parallel.py From civis-python with BSD 3-Clause "New" or "Revised" License
def _test_retries_helper(num_failures, max_submit_retries, should_fail,
                         from_template_id, mock_file_to_civis,
                         mock_result_cls, mock_custom_exec_cls,
                         mock_executor_cls):
    mock_file_to_civis.return_value = 0
    mock_result_cls.return_value.get.return_value = [123]

    # A function to raise fake API errors the first num_failures times it is
    # called.
    counter = {'n_failed': 0}

    def mock_submit(fn='', *args, **kwargs):
        if counter['n_failed'] < num_failures:
            counter['n_failed'] += 1
            raise CivisAPIError(mock.MagicMock())
        else:
            return mock.MagicMock(spec=ContainerFuture)

    mock_custom_exec_cls.return_value.submit.side_effect = mock_submit
    mock_executor_cls.return_value.submit.side_effect = mock_submit

    if from_template_id:
        factory = civis.parallel.make_backend_template_factory(
            from_template_id=from_template_id,
            max_submit_retries=max_submit_retries,
            client=create_client_mock())
    else:
        factory = civis.parallel.make_backend_factory(
            max_submit_retries=max_submit_retries,
            client=create_client_mock())
    register_parallel_backend('civis', factory)
    with parallel_backend('civis'):
        # NB: joblib >v0.11 relies on callbacks from the result object to
        # decide when it's done consuming inputs. We've mocked the result
        # object here, so Parallel must be called either with n_jobs=1 or
        # pre_dispatch='all' to consume the inputs all at once.
        parallel = Parallel(n_jobs=1, pre_dispatch='n_jobs')
        if should_fail:
            with pytest.raises(civis.parallel.JobSubmissionError):
                parallel(delayed(sqrt)(i ** 2) for i in range(3))
        else:
            parallel(delayed(sqrt)(i ** 2) for i in range(3))
Example #14
Source File: run_joblib_func.py From civis-python with BSD 3-Clause "New" or "Revised" License
def worker_func(func_file_id):
    # Have the output File expire in 7 days.
    expires_at = (datetime.now() + timedelta(days=7)).isoformat()
    client = civis.APIClient()
    job_id = os.environ.get('CIVIS_JOB_ID')
    run_id = os.environ.get('CIVIS_RUN_ID')
    if not job_id or not run_id:
        raise RuntimeError("This function must be run inside a "
                           "Civis container job.")

    # Run the function.
    result = None
    try:
        func, remote_backend = _robust_pickle_download(
            func_file_id, client=client, n_retries=5, delay=0.5)

        _backend = _setup_remote_backend(remote_backend)

        # graceful nested context managers are ~hard across python versions,
        # this just works...
        if NO_SKLEARN:
            with _joblib_para_backend(_backend):
                result = func()
        else:
            # we are using the nested context managers to set the joblib
            # backend to the requested one in both copies of joblib, the
            # package and the copy shipped by sklearn at
            # `sklearn.externals.joblib`. joblib maintains the current
            # backend as global state in the package and thus there are
            # two backends to set when you have two copies of the package
            # in play.
            with _sklearn_para_backend(_backend):
                with _joblib_para_backend(_backend):
                    result = func()
    except Exception:
        print("Error! Attempting to record exception.")
        # Wrap the exception in joblib's TransportableException
        # so that joblib can properly display the results.
        e_type, e_value, e_tb = sys.exc_info()
        text = format_exc(e_type, e_value, e_tb, context=10, tb_offset=1)
        result = TransportableException(text, e_type)
        raise
    finally:
        # Serialize the result and upload it to the Files API.
        if result is not None:
            # If the function exits without erroring, we may not have a result.
            result_buffer = BytesIO()
            cloudpickle.dump(result, result_buffer, pickle.HIGHEST_PROTOCOL)
            result_buffer.seek(0)
            output_name = "Results from Joblib job {} / run {}".format(job_id, run_id)
            output_file_id = _robust_file_to_civis(result_buffer, output_name,
                                                   n_retries=5, delay=0.5,
                                                   expires_at=expires_at,
                                                   client=client)
            client.scripts.post_containers_runs_outputs(
                job_id, run_id, 'File', output_file_id)
            print("Results output to file ID: {}".format(output_file_id))