Python dummy_threading.Semaphore() Examples
The following are 7 code examples of dummy_threading.Semaphore(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module dummy_threading, or try the search function.
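dummy_threading mirrors the threading API for interpreters built without thread support (the module was deprecated in Python 3.7 and removed in 3.9), so dummy_threading.Semaphore() behaves as a drop-in stand-in for threading.Semaphore(). Below is a minimal sketch of the import-fallback pattern the examples on this page rely on; the worker function and the thread/permit counts are illustrative and not taken from any of the projects listed:

try:
    import threading as _threading
except ImportError:
    # No real threads available: fall back to the single-threaded stand-in.
    import dummy_threading as _threading

sem = _threading.Semaphore(2)  # allow at most two workers at a time

def worker(n):
    with sem:  # acquire on entry, release on exit
        print('worker %d running' % n)

threads = [_threading.Thread(target=worker, args=(i,)) for i in range(4)]
for t in threads:
    t.start()
for t in threads:
    t.join()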
Example #1
Source File: status.py From git-repo with Apache License 2.0 | 6 votes
def _StatusHelper(self, project, clean_counter, sem, quiet):
  """Obtains the status for a specific project.

  Obtains the status for a project, redirecting the output to
  the specified object. It will release the semaphore when done.

  Args:
    project: Project to get status of.
    clean_counter: Counter for clean projects.
    sem: Semaphore, will call release() when complete.
    output: Where to output the status.
  """
  try:
    state = project.PrintWorkTreeStatus(quiet=quiet)
    if state == 'CLEAN':
      next(clean_counter)
  finally:
    sem.release()
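In this example the helper only calls release(); the matching acquire() happens in the caller, which blocks before starting each worker thread so that no more than a fixed number of status jobs run at once (Example #5 below shows that driving side inside git-repo itself). Here is a generic, self-contained sketch of the same acquire-in-caller / release-in-worker pattern; the names and counts are illustrative, not the actual git-repo code:

import threading
import time

def _worker(item, sem):
    try:
        time.sleep(0.1)            # stand-in for the real per-item work
        print('processed %r' % item)
    finally:
        sem.release()              # always return the slot, even on error

sem = threading.Semaphore(4)       # at most four workers in flight
threads = []
for item in range(10):
    sem.acquire()                  # blocks until a worker slot frees up
    t = threading.Thread(target=_worker, args=(item, sem))
    t.start()
    threads.append(t)
for t in threads:
    t.join()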
Example #2
Source File: utils.py From torngas with BSD 3-Clause "New" or "Revised" License | 5 votes
def __init__(self):
    self.mutex = threading.RLock()
    self.can_read = threading.Semaphore(0)
    self.can_write = threading.Semaphore(0)
    self.active_readers = 0
    self.active_writers = 0
    self.waiting_readers = 0
    self.waiting_writers = 0
Example #3
Source File: synch.py From luscan-devel with GNU General Public License v2.0 | 5 votes
def __init__(self):
    self.mutex = threading.RLock()
    self.can_read = threading.Semaphore(0)
    self.can_write = threading.Semaphore(0)
    self.active_readers = 0
    self.active_writers = 0
    self.waiting_readers = 0
    self.waiting_writers = 0
Example #4
Source File: synch.py From python-compat-runtime with Apache License 2.0 | 5 votes
def __init__(self):
    self.mutex = threading.RLock()
    self.can_read = threading.Semaphore(0)
    self.can_write = threading.Semaphore(0)
    self.active_readers = 0
    self.active_writers = 0
    self.waiting_readers = 0
    self.waiting_writers = 0
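Examples #2 through #4 all initialize can_read and can_write with Semaphore(0): a semaphore created with zero permits blocks the first acquire() until another thread calls release(), which makes it usable as a wake-up signal ("a reader/writer may now proceed") rather than a count of free slots, while mutex guards the bookkeeping counters. A minimal, standalone sketch of that signaling use, independent of these classes and with illustrative names:

import threading

can_read = threading.Semaphore(0)   # zero permits: readers must wait for a signal

def reader():
    can_read.acquire()               # blocks until some writer signals
    print('reader woken up')

t = threading.Thread(target=reader)
t.start()
print('writer done, waking one reader')
can_read.release()                   # hand out exactly one permit
t.join()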
Example #5
Source File: sync.py From git-repo with Apache License 2.0 | 4 votes
def _Fetch(self, projects, opt, err_event):
  fetched = set()
  lock = _threading.Lock()
  pm = Progress('Fetching projects', len(projects),
                always_print_percentage=opt.quiet)

  objdir_project_map = dict()
  for project in projects:
    objdir_project_map.setdefault(project.objdir, []).append(project)

  threads = set()
  sem = _threading.Semaphore(self.jobs)
  for project_list in objdir_project_map.values():
    # Check for any errors before running any more tasks.
    # ...we'll let existing threads finish, though.
    if err_event.isSet() and opt.fail_fast:
      break

    sem.acquire()
    kwargs = dict(opt=opt,
                  projects=project_list,
                  sem=sem,
                  lock=lock,
                  fetched=fetched,
                  pm=pm,
                  err_event=err_event,
                  clone_filter=self.manifest.CloneFilter)
    if self.jobs > 1:
      t = _threading.Thread(target=self._FetchProjectList,
                            kwargs=kwargs)
      # Ensure that Ctrl-C will not freeze the repo process.
      t.daemon = True
      threads.add(t)
      t.start()
    else:
      self._FetchProjectList(**kwargs)

  for t in threads:
    t.join()

  pm.end()
  self._fetch_times.Save()

  if not self.manifest.IsArchive:
    self._GCProjects(projects, opt, err_event)

  return fetched
Example #6
Source File: sync.py From git-repo with Apache License 2.0 | 4 votes
def _Checkout(self, all_projects, opt, err_event, err_results):
  """Checkout projects listed in all_projects

  Args:
    all_projects: List of all projects that should be checked out.
    opt: Program options returned from optparse. See _Options().
    err_event: We'll set this event in the case of an error
        (after printing out info about the error).
    err_results: A list of strings, paths to git repos where checkout failed.
  """
  # Perform checkouts in multiple threads when we are using partial clone.
  # Without partial clone, all needed git objects are already downloaded,
  # in this situation it's better to use only one process because the checkout
  # would be mostly disk I/O; with partial clone, the objects are only
  # downloaded when demanded (at checkout time), which is similar to the
  # Sync_NetworkHalf case and parallelism would be helpful.
  if self.manifest.CloneFilter:
    syncjobs = self.jobs
  else:
    syncjobs = 1

  lock = _threading.Lock()
  pm = Progress('Checking out projects', len(all_projects))

  threads = set()
  sem = _threading.Semaphore(syncjobs)

  for project in all_projects:
    # Check for any errors before running any more tasks.
    # ...we'll let existing threads finish, though.
    if err_event.isSet() and opt.fail_fast:
      break

    sem.acquire()
    if project.worktree:
      kwargs = dict(opt=opt,
                    sem=sem,
                    project=project,
                    lock=lock,
                    pm=pm,
                    err_event=err_event,
                    err_results=err_results)
      if syncjobs > 1:
        t = _threading.Thread(target=self._CheckoutWorker,
                              kwargs=kwargs)
        # Ensure that Ctrl-C will not freeze the repo process.
        t.daemon = True
        threads.add(t)
        t.start()
      else:
        self._CheckoutWorker(**kwargs)

  for t in threads:
    t.join()

  pm.end()
Example #7
Source File: sync.py From git-repo with Apache License 2.0 | 4 votes
def _GCProjects(self, projects, opt, err_event):
  gc_gitdirs = {}
  for project in projects:
    # Make sure pruning never kicks in with shared projects.
    if (not project.use_git_worktrees and
            len(project.manifest.GetProjectsWithName(project.name)) > 1):
      print('%s: Shared project %s found, disabling pruning.' %
            (project.relpath, project.name))
      if git_require((2, 7, 0)):
        project.EnableRepositoryExtension('preciousObjects')
      else:
        # This isn't perfect, but it's the best we can do with old git.
        print('%s: WARNING: shared projects are unreliable when using old '
              'versions of git; please upgrade to git-2.7.0+.' %
              (project.relpath,),
              file=sys.stderr)
        project.config.SetString('gc.pruneExpire', 'never')
    gc_gitdirs[project.gitdir] = project.bare_git

  if multiprocessing:
    cpu_count = multiprocessing.cpu_count()
  else:
    cpu_count = 1
  jobs = min(self.jobs, cpu_count)

  if jobs < 2:
    for bare_git in gc_gitdirs.values():
      bare_git.gc('--auto')
    return

  config = {'pack.threads': cpu_count // jobs if cpu_count > jobs else 1}

  threads = set()
  sem = _threading.Semaphore(jobs)

  def GC(bare_git):
    try:
      try:
        bare_git.gc('--auto', config=config)
      except GitError:
        err_event.set()
      except Exception:
        err_event.set()
        raise
    finally:
      sem.release()

  for bare_git in gc_gitdirs.values():
    if err_event.isSet() and opt.fail_fast:
      break
    sem.acquire()
    t = _threading.Thread(target=GC, args=(bare_git,))
    t.daemon = True
    threads.add(t)
    t.start()

  for t in threads:
    t.join()