Python os.path.dirname() Examples
The following are 27 code examples of os.path.dirname(), drawn from open source projects. The project and source file for each example are noted above it. You may also want to check out the other functions and classes available in the os.path module.
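Before the project examples, here is a minimal sketch (not taken from any of the projects below) of what os.path.dirname() returns and of the dirname(dirname(abspath(__file__))) idiom that several examples use to locate a package or project root. The paths in the comments are hypothetical, for illustration only.

import os.path

# dirname() returns everything up to, but not including, the last path component.
print(os.path.dirname("/home/user/project/app/main.py"))  # /home/user/project/app
print(os.path.dirname("/home/user/project/app"))          # /home/user/project
print(os.path.dirname("main.py"))                         # '' (empty: no directory part)

# A recurring idiom in the examples below: resolve the directory holding the
# current module, then step up one level to reach the package or project root.
this_dir = os.path.dirname(os.path.abspath(__file__))
project_root = os.path.dirname(this_dir)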
Example #1
Source File: test_pep8.py From hydrus with MIT License | 8 votes |
def test_pep8(self):
    """Test method to check PEP8 compliance over the entire project."""
    self.file_structure = dirname(dirname(abspath(__file__)))
    print("Testing for PEP8 compliance of python files in {}".format(
        self.file_structure))
    style = pep8.StyleGuide()
    style.options.max_line_length = 100  # Set this to desired maximum line length
    filenames = []
    # Set this to desired folder location
    for root, _, files in os.walk(self.file_structure):
        python_files = [f for f in files if f.endswith(
            '.py') and "examples" not in root]
        for file in python_files:
            if len(root.split('samples')) != 2:  # Ignore samples directory
                filename = '{0}/{1}'.format(root, file)
                filenames.append(filename)
    check = style.check_files(filenames)
    self.assertEqual(check.total_errors, 0,
                     'PEP8 style errors: %d' % check.total_errors)
Example #2
Source File: venv_update.py From mealpy with MIT License | 6 votes |
def has_system_site_packages(interpreter):
    # TODO: unit-test
    system_site_packages = check_output((
        interpreter,
        '-c',
        # stolen directly from virtualenv's site.py
        """\
import site, os.path
print(
    0
    if os.path.exists(
        os.path.join(os.path.dirname(site.__file__), 'no-global-site-packages.txt')
    )
    else 1
)"""
    ))
    system_site_packages = int(system_site_packages)
    assert system_site_packages in (0, 1)
    return bool(system_site_packages)
Example #3
Source File: ta_data_loader.py From misp42splunk with GNU Lesser General Public License v3.0 | 6 votes |
def _read_default_settings():
    cur_dir = op.dirname(op.abspath(__file__))
    setting_file = op.join(cur_dir, "../../", "splunktalib", "setting.conf")
    parser = configparser.ConfigParser()
    parser.read(setting_file)
    settings = {}
    keys = ("process_size", "thread_min_size", "thread_max_size",
            "task_queue_size")
    for option in keys:
        try:
            settings[option] = parser.get("global", option)
        except configparser.NoOptionError:
            settings[option] = -1
        try:
            settings[option] = int(settings[option])
        except ValueError:
            settings[option] = -1
    log.logger.debug("settings: %s", settings)
    return settings
Example #4
Source File: fix_absolute_import.py From misp42splunk with GNU Lesser General Public License v3.0 | 6 votes |
def probably_a_local_import(self, imp_name):
    """
    Like the corresponding method in the base class, but this also
    supports Cython modules.
    """
    if imp_name.startswith(u"."):
        # Relative imports are certainly not local imports.
        return False
    imp_name = imp_name.split(u".", 1)[0]
    base_path = dirname(self.filename)
    base_path = join(base_path, imp_name)
    # If there is no __init__.py next to the file its not in a package
    # so can't be a relative import.
    if not exists(join(dirname(base_path), "__init__.py")):
        return False
    for ext in [".py", sep, ".pyc", ".so", ".sl", ".pyd", ".pyx"]:
        if exists(base_path + ext):
            return True
    return False
Example #5
Source File: __init__.py From ALF with Apache License 2.0 | 6 votes |
def import_helper():
    from os.path import dirname
    import imp
    possible_libs = ["_alf_grammar.win32",
                     "_alf_grammar.ntoarm",
                     "_alf_grammar.ntox86",
                     "_alf_grammar.linux"]
    found_lib = False
    for i in possible_libs:
        fp = None
        try:
            fp, pathname, description = imp.find_module(i, [dirname(__file__)])
            _mod = imp.load_module("_alf_grammar", fp, pathname, description)
            found_lib = True
            break
        except ImportError:
            pass
        finally:
            if fp:
                fp.close()
    if not found_lib:
        raise ImportError("Failed to load _alf_grammar module")
    return _mod
Example #6
Source File: test_artifact.py From calmjs with GNU General Public License v2.0 | 6 votes |
def test_existing_removed(self):
    # force an existing file
    target = self.registry.records[('app', 'nothing.js')]
    os.mkdir(dirname(target))
    with open(target, 'w'):
        pass
    with pretty_logging(stream=mocks.StringIO()) as stream:
        self.registry.process_package('app')
    log = stream.getvalue()
    self.assertIn(
        "package 'app' has declared 3 entry points for the "
        "'calmjs.artifacts' registry for artifact construction", log
    )
    log = stream.getvalue()
    self.assertIn("removing existing export target at ", log)
    self.assertFalse(exists(target))
Example #7
Source File: toolchain.py From calmjs with GNU General Public License v2.0 | 6 votes |
def compile_bundle_entry(self, spec, entry):
    """
    Handler for each entry for the bundle method of the compile
    process.  This copies the source file or directory into the
    build directory.
    """
    modname, source, target, modpath = entry
    bundled_modpath = {modname: modpath}
    bundled_target = {modname: target}
    export_module_name = []
    if isfile(source):
        export_module_name.append(modname)
        copy_target = join(spec[BUILD_DIR], target)
        if not exists(dirname(copy_target)):
            makedirs(dirname(copy_target))
        shutil.copy(source, copy_target)
    elif isdir(source):
        copy_target = join(spec[BUILD_DIR], modname)
        shutil.copytree(source, copy_target)
    return bundled_modpath, bundled_target, export_module_name
Example #8
Source File: mx_javamodules.py From mx with GNU General Public License v2.0 | 6 votes |
def get_jmod_path(self, respect_stripping=True, alt_module_info_name=None):
    """
    Gets the path to the .jmod file corresponding to this module descriptor.

    :param bool respect_stripping: Specifies whether or not to return a path
           to a stripped .jmod file if this module is based on a dist
    """
    if respect_stripping and self.dist is not None:
        assert alt_module_info_name is None, 'alternate modules not supported for stripped dist ' + self.dist.name
        return join(dirname(self.dist.path), self.name + '.jmod')
    if self.dist is not None:
        qualifier = '_' + alt_module_info_name if alt_module_info_name else ''
        return join(dirname(self.dist.original_path()), self.name + qualifier + '.jmod')
    if self.jarpath:
        return join(dirname(self.jarpath), self.name + '.jmod')
    assert self.jdk, self.name
    p = join(self.jdk.home, 'jmods', self.name + '.jmod')
    assert exists(p), p
    return p
Example #9
Source File: domainUtil.py From hsds with Apache License 2.0 | 6 votes |
def getParentDomain(domain):
    """Get parent domain of given domain.
    E.g. getParentDomain("www.hdfgroup.org") returns "hdfgroup.org"
    Return None if the given domain is already a top-level domain.
    """
    if domain.endswith(DOMAIN_SUFFIX):
        n = len(DOMAIN_SUFFIX) - 1
        domain = domain[:-n]
    bucket = getBucketForDomain(domain)
    domain_path = getPathForDomain(domain)
    if len(domain_path) > 1 and domain_path[-1] == '/':
        domain_path = domain_path[:-1]
    dirname = op.dirname(domain_path)
    if bucket:
        parent = bucket + dirname
    else:
        parent = dirname
    if not parent:
        parent = None
    return parent
Example #10
Source File: __init__.py From aws-ops-automator with Apache License 2.0 | 6 votes |
def all_handlers():
    global __actions
    if __actions is None:
        __actions = []
        current = abspath(os.getcwd())
        while True:
            if isdir(os.path.join(current, "handlers")):
                break
            parent = dirname(current)
            if parent == current:
                # at top level
                raise Exception("Could not find handlers directory")
            else:
                current = parent
        for f in listdir(os.path.join(current, "handlers")):
            if isfile(join(current, "handlers", f)) and f.endswith("_{}.py".format(HANDLER.lower())):
                module_name = HANDLERS_MODULE_NAME.format(f[0:-len(".py")])
                m = _get_module(module_name)
                cls = _get_handler_class(m)
                if cls is not None:
                    handler_name = cls[0]
                    __actions.append(handler_name)
    return __actions
Example #11
Source File: mdbt.py From ConvLab with MIT License | 6 votes |
def cached_path(file_path, cached_dir=None):
    if not cached_dir:
        cached_dir = str(Path(Path.home() / '.tatk') / "cache")
    return allennlp_cached_path(file_path, cached_dir)


# DATA_PATH = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))), 'data/mdbt')
# VALIDATION_URL = os.path.join(DATA_PATH, "data/validate.json")
# WORD_VECTORS_URL = os.path.join(DATA_PATH, "word-vectors/paragram_300_sl999.txt")
# TRAINING_URL = os.path.join(DATA_PATH, "data/train.json")
# ONTOLOGY_URL = os.path.join(DATA_PATH, "data/ontology.json")
# TESTING_URL = os.path.join(DATA_PATH, "data/test.json")
# MODEL_URL = os.path.join(DATA_PATH, "models/model-1")
# GRAPH_URL = os.path.join(DATA_PATH, "graphs/graph-1")
# RESULTS_URL = os.path.join(DATA_PATH, "results/log-1.txt")
# KB_URL = os.path.join(DATA_PATH, "data/")
# TODO: yaoqin
# TRAIN_MODEL_URL = os.path.join(DATA_PATH, "train_models/model-1")
# TRAIN_GRAPH_URL = os.path.join(DATA_PATH, "train_graph/graph-1")
Example #12
Source File: test_0004_vna.py From pyscf with Apache License 2.0 | 6 votes |
def test_vna_lih(self):
    dname = dirname(abspath(__file__))
    n = nao(label='lih', cd=dname)
    m = 200
    dvec, midv = 2*(n.atom2coord[1] - n.atom2coord[0])/m, (n.atom2coord[1] + n.atom2coord[0])/2.0
    vgrid = np.tensordot(np.array(range(-m, m+1)), dvec, axes=0) + midv
    sgrid = np.array(range(-m, m+1)) * np.sqrt((dvec*dvec).sum())
    #vgrid = np.array([[-1.517908564663352e+00, 1.180550033093826e+00, 0.000000000000000e+00]])
    vna = n.vna(vgrid)
    #for v, r in zip(vna, vgrid):
    #    print("%23.15e %23.15e %23.15e %23.15e" % (r[0], r[1], r[2], v))
    #print(vna.shape, sgrid.shape)
    np.savetxt('vna_lih_0004.txt', np.row_stack((sgrid, vna)).T)
    ref = np.loadtxt(dname+'/vna_lih_0004.txt-ref')
    for r, d in zip(ref[:, 1], vna):
        self.assertAlmostEqual(r, d)
Example #13
Source File: test_0091_tddft_x_zip_na20.py From pyscf with Apache License 2.0 | 6 votes |
def test_x_zip_feature_na20_chain(self):
    """ This a test for compression of the eigenvectos at higher energies """
    dname = dirname(abspath(__file__))
    siesd = dname+'/sodium_20'
    x = td_c(label='siesta', cd=siesd, x_zip=True, x_zip_emax=0.25, x_zip_eps=0.05,
             jcutoff=7, xc_code='RPA', nr=128, fermi_energy=-0.0913346431431985)

    eps = 0.005
    ww = np.arange(0.0, 0.5, eps/2.0) + 1j*eps
    data = np.array([ww.real*27.2114, -x.comp_polariz_inter_ave(ww).imag])
    fname = 'na20_chain.tddft_iter_rpa.omega.inter.ave.x_zip.txt'
    np.savetxt(fname, data.T, fmt=['%f', '%f'])
    #print(__file__, fname)
    data_ref = np.loadtxt(dname+'/'+fname+'-ref')
    #print(' x.rf0_ncalls ', x.rf0_ncalls)
    #print(' x.matvec_ncalls ', x.matvec_ncalls)
    self.assertTrue(np.allclose(data_ref, data.T, rtol=1.0e-1, atol=1e-06))
Example #14
Source File: lib_util.py From misp42splunk with GNU Lesser General Public License v3.0 | 5 votes |
def get_app_root_dir():
    """Return the root dir of app"""
    return op.dirname(op.dirname(op.abspath(get_main_file())))
Example #15
Source File: fix_import.py From misp42splunk with GNU Lesser General Public License v3.0 | 5 votes |
def probably_a_local_import(self, imp_name):
    if imp_name.startswith("."):
        # Relative imports are certainly not local imports.
        return False
    imp_name = imp_name.split(".", 1)[0]
    base_path = dirname(self.filename)
    base_path = join(base_path, imp_name)
    # If there is no __init__.py next to the file its not in a package
    # so can't be a relative import.
    if not exists(join(dirname(base_path), "__init__.py")):
        return False
    for ext in [".py", sep, ".pyc", ".so", ".sl", ".pyd"]:
        if exists(base_path + ext):
            return True
    return False
Example #16
Source File: fileClient.py From hsds with Apache License 2.0 | 5 votes |
async def delete_object(self, key, bucket=None):
    """ Deletes the object at the given key
    """
    self._validateBucket(bucket)
    self._validateKey(key)

    filepath = self._getFilePath(bucket, key)
    start_time = time.time()
    log.debug(f"fileClient.delete_object({bucket}/{key} start: {start_time}")
    try:
        log.debug(f"os.remove({filepath})")
        remove(filepath)
        dir_name = pp.dirname(filepath)
        if not listdir(dir_name) and pp.basename(dir_name) != bucket:
            # directory is empty, remove
            rmdir(dir_name)
        finish_time = time.time()
        log.info(f"fileClient.delete_object({key} bucket={bucket}) start={start_time:.4f} finish={finish_time:.4f} elapsed={finish_time-start_time:.4f}")
    except IOError as ioe:
        msg = f"fileClient: IOError deleting {bucket}/{key}: {ioe}"
        log.warn(msg)
        raise HTTPInternalServerError()
    except CancelledError as cle:
        self._file_stats_increment("error_count")
        msg = f"CancelledError deleting s3 obj {key}: {cle}"
        log.error(msg)
        raise HTTPInternalServerError()
    except Exception as e:
        self._file_stats_increment("error_count")
        msg = f"Unexpected Exception {type(e)} deleting s3 obj {key}: {e}"
        log.error(msg)
        raise HTTPInternalServerError()
    await asyncio.sleep(0)  # for async compat
Example #17
Source File: fileClient.py From hsds with Apache License 2.0 | 5 votes |
def _validateBucket(self, bucket):
    if not bucket or pp.isabs(bucket) or pp.dirname(bucket):
        msg = "invalid bucket name"
        log.warn(msg)
        raise HTTPBadRequest(reason=msg)
Example #18
Source File: PinYin.py From Jtyoui with MIT License | 5 votes |
def load_pin_yin(tone=False):
    """Load the pinyin model dictionary.

    Argument: True keeps tone marks, False drops them.
    :param tone: whether to load tone marks
    :return: pinyin model dictionary
    """
    d = {}
    file_zip = path.dirname(path.abspath(__file__))
    file_zip = path.join(path.dirname(file_zip), 'file_zip', 'py.zip')
    f = zipfile.ZipFile(file_zip)
    fp = f.read('py.txt')
    lines = fp.decode('utf-8').split('\n')
    for line in lines:
        v, k = line.split('#')
        if ',' in v:
            v = v[:v.index(',')]
        if tone:
            d.setdefault(k, v)
        else:
            vs = ''
            for v_ in v:
                vs += letter_maps[v_]
            d.setdefault(k, vs)
    return d
Example #19
Source File: local.py From py with MIT License | 5 votes |
def new(self, **kw):
    """ create a modified version of this path.
        the following keyword arguments modify various path parts::

          a:/some/path/to/a/file.ext
          xx                           drive
          xxxxxxxxxxxxxxxxx            dirname
                            xxxxxxxx   basename
                            xxxx       purebasename
                                 xxx   ext
    """
    obj = object.__new__(self.__class__)
    if not kw:
        obj.strpath = self.strpath
        return obj
    drive, dirname, basename, purebasename, ext = self._getbyspec(
        "drive,dirname,basename,purebasename,ext")
    if 'basename' in kw:
        if 'purebasename' in kw or 'ext' in kw:
            raise ValueError("invalid specification %r" % kw)
    else:
        pb = kw.setdefault('purebasename', purebasename)
        try:
            ext = kw['ext']
        except KeyError:
            pass
        else:
            if ext and not ext.startswith('.'):
                ext = '.' + ext
        kw['basename'] = pb + ext
    if ('dirname' in kw and not kw['dirname']):
        kw['dirname'] = drive
    else:
        kw.setdefault('dirname', dirname)
    kw.setdefault('sep', self.sep)
    obj.strpath = normpath(
        "%(dirname)s%(sep)s%(basename)s" % kw)
    return obj
Example #20
Source File: launch.py From evolution-strategies-starter with MIT License | 5 votes |
def upload_archive(exp_name, archive_excludes, s3_bucket):
    import hashlib, os.path as osp, subprocess, tempfile, uuid, sys

    # Archive this package
    thisfile_dir = osp.dirname(osp.abspath(__file__))
    pkg_parent_dir = osp.abspath(osp.join(thisfile_dir, '..', '..'))
    pkg_subdir = osp.basename(osp.abspath(osp.join(thisfile_dir, '..')))
    assert osp.abspath(__file__) == osp.join(pkg_parent_dir, pkg_subdir, 'scripts', 'launch.py'), 'You moved me!'

    # Run tar
    tmpdir = tempfile.TemporaryDirectory()
    local_archive_path = osp.join(tmpdir.name, '{}.tar.gz'.format(uuid.uuid4()))
    tar_cmd = ["tar", "-zcvf", local_archive_path, "-C", pkg_parent_dir]
    for pattern in archive_excludes:
        tar_cmd += ["--exclude", pattern]
    tar_cmd += ["-h", pkg_subdir]
    highlight(" ".join(tar_cmd))

    if sys.platform == 'darwin':
        # Prevent Mac tar from adding ._* files
        env = os.environ.copy()
        env['COPYFILE_DISABLE'] = '1'
        subprocess.check_call(tar_cmd, env=env)
    else:
        subprocess.check_call(tar_cmd)

    # Construct remote path to place the archive on S3
    with open(local_archive_path, 'rb') as f:
        archive_hash = hashlib.sha224(f.read()).hexdigest()
    remote_archive_path = '{}/{}_{}.tar.gz'.format(s3_bucket, exp_name, archive_hash)

    # Upload
    upload_cmd = ["aws", "s3", "cp", local_archive_path, remote_archive_path]
    highlight(" ".join(upload_cmd))
    subprocess.check_call(upload_cmd)

    presign_cmd = ["aws", "s3", "presign", remote_archive_path,
                   "--expires-in", str(60 * 60 * 24 * 30)]
    highlight(" ".join(presign_cmd))
    remote_url = subprocess.check_output(presign_cmd).decode("utf-8").strip()
    return remote_url
Example #21
Source File: setup.py From nose-htmloutput with BSD 2-Clause "Simplified" License | 5 votes |
def read(*names, **kwargs):
    return io.open(
        join(dirname(__file__), *names),
        encoding=kwargs.get("encoding", "utf8")
    ).read()
Example #22
Source File: test_0004_vna.py From pyscf with Apache License 2.0 | 5 votes |
def test_water_vkb(self):
    """ This """
    from numpy import einsum, array
    import os
    dname = os.path.dirname(os.path.abspath(__file__))
    sv = nao(label='water', cd=dname)
Example #23
Source File: setup.py From libTLDA with MIT License | 5 votes |
def read(fname):
    """Read filename"""
    return open(os.path.join(os.path.dirname(__file__), fname)).read()
Example #24
Source File: test_0004_vna.py From pyscf with Apache License 2.0 | 5 votes |
def test_vna_n2(self):
    dname = dirname(abspath(__file__))
    n = nao(label='n2', cd=dname)
    m = 200
    dvec, midv = 2*(n.atom2coord[1] - n.atom2coord[0])/m, (n.atom2coord[1] + n.atom2coord[0])/2.0
    vgrid = np.tensordot(np.array(range(-m, m+1)), dvec, axes=0) + midv
    sgrid = np.array(range(-m, m+1)) * np.sqrt((dvec*dvec).sum())
    vna = n.vna(vgrid)
    #print(vna.shape, sgrid.shape)
    #np.savetxt('vna_n2_0004.txt', np.row_stack((sgrid, vna)).T)
    ref = np.loadtxt(dname+'/vna_n2_0004.txt-ref')
    for r, d in zip(ref[:, 1], vna):
        self.assertAlmostEqual(r, d)
Example #25
Source File: test_0070_tddft_upkc.py From pyscf with Apache License 2.0 | 5 votes |
def test_tddft_upkc(self):
    """ This is a comparison of two equivalent ways of computing the polarizability for water molecule """
    td = tddft_iter_2ord(label='water', cd=dirname(abspath(__file__)), jcutoff=7,
                         xc_code='RPA', verbosity=0)
    omegas = np.arange(0.0, 1.0, 0.005) + 1j*0.01

    pxx1 = -td.comp_polariz_nonin_xx(omegas).imag
    data1 = np.array([omegas.real*27.2114, pxx1])
    np.savetxt('water.tddft_iter_nonin.txt', data1.T)
    #print(' td.rf0_ncalls ', td.rf0_ncalls)
    #print(' td.matvec_ncalls ', td.matvec_ncalls)

    pxx1 = -td.comp_polariz_inter_xx(omegas).imag
    data1 = np.array([omegas.real*27.2114, pxx1])
    np.savetxt('water.tddft_iter_unit.txt', data1.T)
    #print(' td.rf0_ncalls ', td.rf0_ncalls)
    #print(' td.matvec_ncalls ', td.matvec_ncalls)

    pxx2 = td.polariz_upkc(omegas)
    wp = np.zeros((2*pxx2.shape[1]+1, pxx2.shape[0]))
    wp[0, :] = omegas.real*27.2114
    wp[1:pxx2.shape[1]+1, :] = pxx2.real.T
    wp[pxx2.shape[1]+1:, :] = pxx2.imag.T
    np.savetxt('water.tddft_iter_upkc.txt', wp.T)
    #print(' td.rf0_ncalls ', td.rf0_ncalls)
    #print(' td.matvec_ncalls ', td.matvec_ncalls)

    pxx3 = -td.polariz_dckcd(omegas).imag
    data1 = np.array([omegas.real*27.2114, pxx3])
    np.savetxt('water.tddft_iter_dckcd.txt', data1.T)
    #print(' td.rf0_ncalls ', td.rf0_ncalls)
    #print(' td.matvec_ncalls ', td.matvec_ncalls)
Example #26
Source File: __init__.py From qtsass with MIT License | 5 votes |
def example(*paths):
    """Get path to an example."""
    return normpath(join(dirname(__file__), '..', 'examples', *paths))
Example #27
Source File: ca_certs_locater.py From misp42splunk with GNU Lesser General Public License v3.0 | 5 votes |
def _get_temp_cert_file_dir():
    import __main__
    app_root = op.dirname(op.dirname(op.abspath(__main__.__file__)))

    temp_dir = op.join(app_root, 'temp_certs')
    if not op.isdir(temp_dir):
        try:
            os.mkdir(temp_dir)
        except:
            pass
    for candidate in ['temp_certs', 'local', 'default']:
        dir_path = op.join(app_root, candidate)
        if op.isdir(dir_path):
            return dir_path
    return app_root