Python utils.run_command() Examples
The following are 22 code examples of utils.run_command(), drawn from open-source projects. The source file, project, and license for each example are listed above it, so you can follow them back to the original code. You may also want to check out all of the other available functions and classes of the utils module.
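Note that utils here is a project-local helper module, not part of the Python standard library, so the exact signature and return value of run_command() differ from project to project: the cmdbac examples below treat the result as a tuple and read stdout from out[1] and stderr from out[2], while the pipelines examples decode a raw bytes result. As a rough, hypothetical sketch only (not the actual implementation used by any of these projects), such a helper is typically a thin wrapper around subprocess:

import subprocess

# Hypothetical illustration; each project above defines its own utils.run_command.
def run_command(command, timeout=None):
    """Run a shell command and return (returncode, stdout, stderr)."""
    # shell=True lets callers pass a single command string, as the examples below do.
    process = subprocess.Popen(
        command, shell=True,
        stdout=subprocess.PIPE, stderr=subprocess.PIPE,
        universal_newlines=True)
    stdout, stderr = process.communicate(timeout=timeout)
    return process.returncode, stdout, stderr

With a wrapper of this shape, out[1] is the captured stdout and out[2] the captured stderr, which matches the way most of the cmdbac examples below inspect the result.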
Example #1
Source File: rordeployer.py, from cmdbac, with Apache License 2.0 (6 votes)
def get_runtime(self, version = None):
    if self.runtime != None:
        return self.runtime
    latest_successful_attempt = self.get_latest_successful_attempt()
    if latest_successful_attempt != None:
        return {
            'executable': latest_successful_attempt.runtime.executable,
            'version': latest_successful_attempt.runtime.version
        }
    else:
        if version != None:
            return {'executable': 'ruby', 'version': version}
        else:
            out = utils.run_command('ruby -v')[1].split(' ')
            return {'executable': 'ruby', 'version': out[1]}
## DEF
Example #2
Source File: kfp_client_utils.py, from pipelines, with Apache License 2.0 (6 votes)
def compile_and_run_pipeline(
    client,
    experiment_id,
    pipeline_definition,
    input_params,
    output_file_dir,
    pipeline_name,
):
    pipeline_path = os.path.join(output_file_dir, pipeline_name)
    utils.run_command(
        f"dsl-compile --py {pipeline_definition} --output {pipeline_path}.yaml"
    )
    run = client.run_pipeline(
        experiment_id, pipeline_name, f"{pipeline_path}.yaml", input_params
    )
    return run.id
Example #3
Source File: extract.py, from cmdbac, with Apache License 2.0 (6 votes)
def extract_forms(url, follow = "false", cookie_jar = None, filename = "forms.json"):
    utils.remove_file(os.path.join(os.path.dirname(__file__), filename))
    if cookie_jar == None:
        try:
            out = utils.run_command('{} && {}'.format(
                utils.cd(os.path.dirname(os.path.abspath(__file__))),
                'scrapy crawl form -o {} -a start_url="{}" -a follow={} -a proxy={}'.format(filename, url, follow, HTTP_PROXY)),
                EXTRACT_WAIT_TIME)
        except:
            out = utils.run_command('{} && {}'.format(
                utils.cd(os.path.dirname(os.path.abspath(__file__))),
                'scrapy crawl form -o {} -a start_url="{}" -a follow={}'.format(filename, url, follow)),
                EXTRACT_WAIT_TIME)
    else:
        cookie_jar_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), filename.replace('.json', '.txt'))
        cookie_jar.save(cookie_jar_path)
        out = utils.run_command('{} && {}'.format(
            utils.cd(os.path.dirname(os.path.abspath(__file__))),
            'scrapy crawl form_with_cookie -o {} -a start_url="{}" -a cookie_jar={}'.format(filename, url, cookie_jar_path)),
            EXTRACT_WAIT_TIME)

    with open(os.path.join(os.path.dirname(__file__), filename)) as json_forms:
        forms = json.load(json_forms)
    utils.remove_file(os.path.join(os.path.dirname(__file__), filename))
    return forms
Example #4
Source File: extract.py, from cmdbac, with Apache License 2.0 (6 votes)
def extract_urls(url, follow = "false", cookie_jar = None, filename = "urls.json"):
    utils.remove_file(os.path.join(os.path.dirname(__file__), filename))
    if cookie_jar == None:
        try:
            out = utils.run_command('{} && {}'.format(
                utils.cd(os.path.dirname(os.path.abspath(__file__))),
                'scrapy crawl url -o {} -a start_url="{}" -a follow={} -a proxy={}'.format(filename, url, follow, HTTP_PROXY)),
                EXTRACT_WAIT_TIME)
        except:
            out = utils.run_command('{} && {}'.format(
                utils.cd(os.path.dirname(os.path.abspath(__file__))),
                'scrapy crawl url -o {} -a start_url="{}" -a follow={}'.format(filename, url, follow)),
                EXTRACT_WAIT_TIME)
    else:
        cookie_jar_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), filename.replace('.json', '.txt'))
        cookie_jar.save(cookie_jar_path)
        out = utils.run_command('{} && {}'.format(
            utils.cd(os.path.dirname(os.path.abspath(__file__))),
            'scrapy crawl url_with_cookie -o {} -a start_url="{}" -a cookie_jar={}'.format(filename, url, cookie_jar_path)),
            EXTRACT_WAIT_TIME)

    with open(os.path.join(os.path.dirname(__file__), filename)) as json_urls:
        urls = json.load(json_urls)
    utils.remove_file(os.path.join(os.path.dirname(__file__), filename))
    return urls
Example #5
Source File: rordeployer.py, from cmdbac, with Apache License 2.0 (6 votes)
def install_requirements(self, path):
    if path:
        git_clone_error_cnt = 0
        while True:
            out = self.run_command(path, 'bundle install --no-cache --clean')
            git_clone_error = re.search('Retrying git clone (.*) due to error', out[1])
            if git_clone_error:
                if git_clone_error_cnt >= 3:
                    break
                command = 'git clone {}'.format(git_clone_error.group(1)).replace('git://github.com/', 'https://github.com/')
                LOG.info('Fix Git Fetching Error : {}'.format(command))
                utils.run_command(command)
                git_clone_error_cnt += 1
            else:
                return out[1]
    return ''
## DEF
Example #6
Source File: djangodeployer.py, from cmdbac, with Apache License 2.0 (5 votes)
def load_fixtures(self, path):
    LOG.info('Loading fixtures ...')
    for file in os.listdir(os.path.join(path, 'fixtures')):
        LOG.info('Loading fixtures: {}'.format(file))
        command = '{} && {} && unset DJANGO_SETTINGS_MODULE && {}'.format(
            utils.to_env(self.base_path), utils.cd(path),
            "python manage.py loaddata {}".format(os.path.join(path, 'fixtures', file)))
        utils.run_command(command)
## DEF
Example #7
Source File: grailsdeployer.py, from cmdbac, with Apache License 2.0 (5 votes)
def install_requirements(self, path):
    if path:
        command = '{} && export JAVA_HOME=/usr/lib/jvm/java-7-openjdk-amd64 && chmod 777 grailsw && ./grailsw compile'.format(utils.cd(path))
        out = utils.run_command(command)
        if out[1] == '':
            return out[2]
        else:
            return out[1]
    return ''
## DEF
Example #8
Source File: nodedeployer.py, from cmdbac, with Apache License 2.0 (5 votes)
def find_port(self):
    out = utils.run_command('netstat -nlp | grep -i "node"')
    port = re.search('0 :::(\d+)', out[1])
    if port:
        self.port = port.group(1)
Example #9
Source File: nodedeployer.py, from cmdbac, with Apache License 2.0 (5 votes)
def get_runtime(self):
    out = utils.run_command('node -v')
    return {'executable': 'node', 'version': out[1][1:]}
## DEF
Example #10
Source File: nodedeployer.py, from cmdbac, with Apache License 2.0 (5 votes)
def install_requirements(self, path):
    if path:
        command = '{} && npm install'.format(utils.cd(path))
        out = utils.run_command(command)
        if out[1] == '':
            return out[2]
        else:
            return out[1]
    return ''
## DEF
Example #11
Source File: rordeployer.py, from cmdbac, with Apache License 2.0 (5 votes)
def create_superuser(self, path):
    LOG.info('Creating superuser ...')
    out = self.run_command(path, 'rails runner "{}"'.format(
        "User.create!(:email=>'admin@test.com',:username=>'admin',:password=>'admin')"))
    return out
## DEF
Example #12
Source File: rordeployer.py, from cmdbac, with Apache License 2.0 (5 votes)
def sync_server(self, path):
    LOG.info('Syncing server ...')
    out = self.run_command(path, 'bundle exec rake db:migrate')
    if 'rake aborted!' in out[1]:
        LOG.info(out)
        return False
    if self.repo.setup_scripts != None:
        for command in self.repo.setup_scripts.split('\n'):
            self.run_command(path, command)
    return True
## DEF
Example #13
Source File: rordeployer.py, from cmdbac, with Apache License 2.0 (5 votes)
def run_command(self, path, command):
    commands = '{} && {} && {}'.format(
        utils.cd(path), utils.use_ruby_version(self.runtime['version']), command)
    return utils.run_command(commands)
Example #14
Source File: basedeployer.py, from cmdbac, with Apache License 2.0 (5 votes)
def check_server(self):
    LOG.info("Checking server ...")
    url = self.get_main_url()
    LOG.info("Main Url : {}".format(url))
    command = 'wget --spider {}'.format(url)
    out = utils.run_command(command)
    LOG.info(out)
    if not "200 OK" in out[2]:
        return ATTEMPT_STATUS_RUNNING_ERROR
    else:
        return ATTEMPT_STATUS_SUCCESS
## DEF
Example #15
Source File: djangodeployer.py, from cmdbac, with Apache License 2.0 (5 votes)
def create_superuser(self, path):
    LOG.info('Creating superuser ...')
    command = '{} && {} && unset DJANGO_SETTINGS_MODULE && {}'.format(
        utils.to_env(self.base_path), utils.cd(path),
        """ echo "from django.contrib.auth.models import User; User.objects.create_superuser('admin', 'admin@test.com', 'admin')" | python manage.py shell """)
    return utils.run_command(command)
## DEF
Example #16
Source File: djangodeployer.py, from cmdbac, with Apache License 2.0 (5 votes)
def sync_server(self, path):
    LOG.info('Syncing server ...')
    command = '{} && {} && unset DJANGO_SETTINGS_MODULE && python manage.py syncdb --noinput'.format(
        utils.to_env(self.base_path), utils.cd(path))
    output = utils.run_command(command)
    if 'Unknown command' in output[2]:
        command = '{} && {} && unset DJANGO_SETTINGS_MODULE && python manage.py migrate --noinput'.format(
            utils.to_env(self.base_path), utils.cd(path))
    return utils.run_command(command)
## DEF
Example #17
Source File: argo_utils.py, from pipelines, with Apache License 2.0 (5 votes)
def print_workflow_logs(workflow_name):
    output = utils.run_command(
        f"argo logs {workflow_name} -n {utils.get_kfp_namespace()}"
    )
    print(f"workflow logs:\n", output.decode())
Example #18
Source File: sge.py, from anvio, with GNU General Public License v3.0 (5 votes)
def clusterize(self, parts):
    # create a 8 digits random identifier for cluster jobs:
    identifier = ''.join(random.choice(string.ascii_uppercase) for x in range(10))

    for part in parts:
        command = self.command % {'binary': self.binary, 'part': part}

        # create sh file
        shell_script = part + '.sh'
        open(shell_script, 'w').write(QSUB_SCRIPT % {'log': part + '.log',
                                                     'identifier': identifier,
                                                     'command': command})

        # submit script to cluster
        utils.run_command('qsub %s' % shell_script)

    while True:
        qstat_info = self.get_qstat_info(identifier)
        total_processes = sum(qstat_info.values())
        if total_processes == 0:
            break

        self.progress.update('Qstat Info :: Total Jobs: %s, %s'
                             % (pp(total_processes),
                                ', '.join(['%s: %s' % (x, pp(qstat_info[x])) for x in qstat_info])))
        time.sleep(5)

    return True
Example #19
Source File: plex.py, from plex_autoscan, with GNU General Public License v3.0 (5 votes)
def analyze_item(config, scan_path):
    if not os.path.exists(config['PLEX_DATABASE_PATH']):
        logger.warning("Could not analyze of '%s' because Plex database could not be found.", scan_path)
        return

    # get files metadata_item_id
    metadata_item_ids = get_file_metadata_ids(config, scan_path)
    if metadata_item_ids is None or not len(metadata_item_ids):
        logger.warning("Aborting analysis of '%s' because could not find any 'metadata_item_id' for it.", scan_path)
        return
    metadata_item_id = ','.join(str(x) for x in metadata_item_ids)

    # build Plex analyze command
    analyze_type = 'analyze-deeply' if config['PLEX_ANALYZE_TYPE'].lower() == 'deep' else 'analyze'
    if os.name == 'nt':
        final_cmd = '"%s" --%s --item %s' % (config['PLEX_SCANNER'], analyze_type, metadata_item_id)
    else:
        cmd = 'export LD_LIBRARY_PATH=' + config['PLEX_LD_LIBRARY_PATH'] + ';'
        if not config['USE_DOCKER']:
            cmd += 'export PLEX_MEDIA_SERVER_APPLICATION_SUPPORT_DIR=' + config['PLEX_SUPPORT_DIR'] + ';'
        cmd += config['PLEX_SCANNER'] + ' --' + analyze_type + ' --item ' + metadata_item_id

        if config['USE_DOCKER']:
            final_cmd = 'docker exec -u %s -i %s bash -c %s' % \
                        (cmd_quote(config['PLEX_USER']), cmd_quote(config['DOCKER_NAME']), cmd_quote(cmd))
        elif config['USE_SUDO']:
            final_cmd = 'sudo -u %s bash -c %s' % (config['PLEX_USER'], cmd_quote(cmd))
        else:
            final_cmd = cmd

    # begin analysis
    logger.debug("Starting %s analysis of 'metadata_item': %s",
                 'deep' if config['PLEX_ANALYZE_TYPE'].lower() == 'deep' else 'basic', metadata_item_id)
    logger.debug(final_cmd)
    utils.run_command(final_cmd.encode("utf-8"))
    logger.info("Finished %s analysis of 'metadata_item': %s",
                'deep' if config['PLEX_ANALYZE_TYPE'].lower() == 'deep' else 'basic', metadata_item_id)
Example #20
Source File: repository.py, from builds, with GNU General Public License v3.0 (5 votes)
def checkout(self, revision):
    """
    Check out a revision.
    """
    LOG.info("%(name)s: Updating svn repository" % dict(name=self.name))
    try:
        utils.run_command("svn update", cwd=self.working_copy_dir)
    except:
        LOG.debug("%(name)s: Failed to update svn repository" % dict(name=self.name))
        pass
    else:
        LOG.info("%(name)s: Updated svn repository" % dict(name=self.name))

    LOG.info("%(name)s: Checking out revision %(revision)s"
             % dict(name=self.name, revision=revision))
    try:
        utils.run_command("svn checkout %(repo_url)s@%(revision)s ."
                          % dict(repo_url=self.url, revision=revision),
                          cwd=self.working_copy_dir)
    except:
        message = ("Could not find revision %s at %s repository"
                   % (revision, self.name))
        LOG.exception(message)
        raise exception.RepositoryError(message=message)
Example #21
Source File: repository.py, from builds, with GNU General Public License v3.0 (5 votes)
def checkout_from(cls, remote_repo_url, repo_path):
    """
    Checkout a repository from a remote URL into a local path.
    """
    LOG.info("Checking out repository from '%s' into '%s'" % (remote_repo_url, repo_path))
    command = 'svn checkout '
    proxy = CONF.get('http_proxy')
    if proxy:
        url = urlparse.urlparse(proxy)
        host = url.scheme + '://' + url.hostname
        port = url.port
        options = ("servers:global:http-proxy-host='%s'" % host,
                   "servers:global:http-proxy-port='%s'" % port)
        proxy_conf = ['--config-option ' + option for option in options]
        command += ' '.join(proxy_conf) + ' '
    command += '%(remote_repo_url)s %(local_target_path)s' % \
        {'remote_repo_url': remote_repo_url, 'local_target_path': repo_path}
    try:
        utils.run_command(command)
        return SvnRepository(remote_repo_url, repo_path)
    except:
        message = "Failed to clone repository"
        LOG.exception(message)
        raise exception.RepositoryError(message=message)
Example #22
Source File: repository.py, from builds, with GNU General Public License v3.0 (4 votes)
def archive(self, archive_name, build_dir):
    """
    Archive repository and its submodules into a single compressed file.

    Args:
        archive_name (str): prefix of the resulting archive file name
        build_dir (str): path to the directory to place the archive file
    """
    archive_file_path = os.path.join(build_dir, archive_name + ".tar")
    LOG.info("Archiving {name} into {file}"
             .format(name=self.name, file=archive_file_path))
    with open(archive_file_path, "wb") as archive_file:
        super(GitRepository, self).archive(
            archive_file, prefix=archive_name + "/", format="tar")

    # Generate one tar file for each submodule
    submodules_archives_paths = []
    for submodule in self.submodules:
        submodule_archive_file_path = os.path.join(
            build_dir, "%s-%s.tar" % (
                archive_name, submodule.name.replace("/", "_")))
        LOG.info("Archiving submodule {name} into {file}".format(
            name=submodule.name, file=submodule_archive_file_path))
        with open(submodule_archive_file_path, "wb") as archive_file:
            submodule.module().archive(
                archive_file,
                prefix=os.path.join(archive_name, submodule.path) + "/",
                format="tar")
        submodules_archives_paths.append(submodule_archive_file_path)

    if submodules_archives_paths:
        LOG.info("Concatenating {name} archive with submodules"
                 .format(name=self.name))
        for submodule_archive_path in submodules_archives_paths:
            # The tar --concatenate option has a bug, producing an
            # undesired result when more than two files are
            # concatenated:
            # https://lists.gnu.org/archive/html/bug-tar/2008-08/msg00002.html
            cmd = "tar --concatenate --file %s %s" % (
                archive_file_path, submodule_archive_path)
            utils.run_command(cmd)

    compressed_archive_file_path = archive_file_path + ".gz"
    LOG.info("Compressing {name} archive into {file}"
             .format(name=self.name, file=compressed_archive_file_path))
    cmd = "gzip --fast %s" % archive_file_path
    utils.run_command(cmd)
    return compressed_archive_file_path