Python googleapiclient.discovery.Resource() Examples
The following are 21 code examples of googleapiclient.discovery.Resource().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions and classes of the module googleapiclient.discovery, or try the search function.
Example #1
Source File: discovery_api.py From airflow with Apache License 2.0 | 6 votes |
def get_conn(self):
    """
    Creates an authenticated api client for the given api service name and credentials.

    :return: the authenticated api service.
    :rtype: Resource
    """
    self.log.info("Authenticating Google API Client")
    if not self._conn:
        # Build the client only once; subsequent calls reuse the cached one.
        self._conn = build(
            serviceName=self.api_service_name,
            version=self.api_version,
            http=self._authorize(),
            cache_discovery=False,
        )
    return self._conn
Example #2
Source File: test_service_mock.py From sheetfu with MIT License | 5 votes |
def test_mock_instantiation(self):
    """The mocked HTTP transport must still yield a genuine googleapiclient Resource."""
    mocked_service = SheetsService().build(http=self.http_mocks)
    assert isinstance(mocked_service, Resource)
Example #3
Source File: cloudbuild.py From django-cloud-deploy with Apache License 2.0 | 5 votes |
def __init__(self, cloudbuild_service: discovery.Resource):
    """Store the Cloud Build discovery client used for later API calls.

    Args:
        cloudbuild_service: Discovery Resource for the Cloud Build API.
    """
    self._cloudbuild_service = cloudbuild_service
Example #4
Source File: service_account.py From django-cloud-deploy with Apache License 2.0 | 5 votes |
def __init__(self, iam_service: discovery.Resource,
             cloudresourcemanager_service: discovery.Resource):
    """Store the discovery clients used for service-account management.

    Args:
        iam_service: Discovery Resource for the IAM API.
        cloudresourcemanager_service: Discovery Resource for the Cloud
            Resource Manager API.
    """
    self._iam_service = iam_service
    self._cloudresourcemanager_service = cloudresourcemanager_service
Example #5
Source File: container.py From django-cloud-deploy with Apache License 2.0 | 5 votes |
def __init__(self, container_service: discovery.Resource,
             credentials: credentials.Credentials):
    """Store the container discovery client and create a Docker client.

    Args:
        container_service: Discovery Resource for the container (GKE) API.
        credentials: Credentials passed on to the Docker client setup.
    """
    self._container_service = container_service
    self._create_docker_client(credentials)
Example #6
Source File: cloudkms.py From django-cloud-deploy with Apache License 2.0 | 5 votes |
def __init__(self, cloudkms_service: discovery.Resource):
    """Store the Cloud KMS discovery client used for later API calls.

    Args:
        cloudkms_service: Discovery Resource for the Cloud KMS API.
    """
    self._cloudkms_service = cloudkms_service
Example #7
Source File: enable_service.py From django-cloud-deploy with Apache License 2.0 | 5 votes |
def __init__(self, service_usage_service: discovery.Resource):
    """Store the Service Usage discovery client used for later API calls.

    Args:
        service_usage_service: Discovery Resource for the Service Usage API.
    """
    self._service_usage_service = service_usage_service
Example #8
Source File: database.py From django-cloud-deploy with Apache License 2.0 | 5 votes |
def __init__(self, sqladmin_service: discovery.Resource):
    """Store the Cloud SQL Admin discovery client used for later API calls.

    Args:
        sqladmin_service: Discovery Resource for the Cloud SQL Admin API.
    """
    self._sqladmin_service = sqladmin_service
Example #9
Source File: storage.py From django-cloud-deploy with Apache License 2.0 | 5 votes |
def __init__(self, storage_service: discovery.Resource):
    """Store the Cloud Storage discovery client used for later API calls.

    Args:
        storage_service: Discovery Resource for the Cloud Storage API.
    """
    self._storage_service = storage_service
Example #10
Source File: billing.py From django-cloud-deploy with Apache License 2.0 | 5 votes |
def __init__(self, billing_service: discovery.Resource):
    """Store the Cloud Billing discovery client used for later API calls.

    Args:
        billing_service: Discovery Resource for the Cloud Billing API.
    """
    self._billing_service = billing_service
Example #11
Source File: cloud_source.py From django-cloud-deploy with Apache License 2.0 | 5 votes |
def __init__(self, cloudsource_service: discovery.Resource):
    """Store the Cloud Source Repositories client used for later API calls.

    Args:
        cloudsource_service: Discovery Resource for the Cloud Source
            Repositories API.
    """
    self._cloudsource_service = cloudsource_service
Example #12
Source File: __init__.py From starthinker with Apache License 2.0 | 5 votes |
def execute(self, run=True, iterate=True):
    """Build the chained Google API call and optionally execute it.

    Args:
        run: When True, execute the request and return the response (or an
            iterator for paginated responses). When False, return the
            un-executed job object (e.g. for chunked uploads).
        iterate: When True, wrap a paginated response in API_Iterator.

    Returns:
        The API response, an API_Iterator when the response is paginated,
        or the raw job object when run is False.
    """
    # Start building the call sequence from the service object.
    self.function = get_service(self.api, self.version, self.auth, uri_file=self.uri)

    # Walk the call stack to build the chained call.
    # Do not invoke the intermediate functions: the un-called abstract is
    # necessary for iterator page-next calls.
    for f_n in self.function_stack:
        self.function = getattr(self.function if isinstance(self.function, Resource) else self.function(), f_n)

    # Save the job so callers that handle it manually can access it.
    self.job = self.function(**self.function_kwargs)

    if run:
        self.response = API_Retry(self.job)

        # If paginated, automatically iterate.
        if (iterate and (self.iterate or (isinstance(self.response, dict) and 'nextPageToken' in self.response))):
            return API_Iterator(self.function, self.function_kwargs, self.response)

        # If not paginated, return the response as is.
        else:
            return self.response

    # If not run, just return the job object (for chunked upload, for example).
    else:
        return self.job
Example #13
Source File: campaign_manager.py From airflow with Apache License 2.0 | 5 votes |
def get_conn(self) -> Resource:
    """Retrieves connection to Campaign Manager, building it on first use."""
    if not self._conn:
        self._conn = build(
            "dfareporting",
            self.api_version,
            http=self._authorize(),
            cache_discovery=False,
        )
    return self._conn
Example #14
Source File: analytics.py From airflow with Apache License 2.0 | 5 votes |
def get_conn(self) -> Resource:
    """Retrieves connection to Google Analytics 360 (cached after first build)."""
    if self._conn:
        return self._conn
    authorized_http = self._authorize()
    self._conn = build(
        "analytics",
        self.api_version,
        http=authorized_http,
        cache_discovery=False,
    )
    return self._conn
Example #15
Source File: analytics.py From airflow with Apache License 2.0 | 5 votes |
def _paginate(self, resource: Resource, list_args: Optional[Dict[str, Any]] = None):
    """Fetch all pages of a list call and return the combined items."""
    kwargs = list_args or {}
    fetched: List[Dict] = []
    while True:
        # The Analytics API uses a 1-based start_index for paging.
        request = resource.list(start_index=len(fetched) + 1, **kwargs)  # pylint: disable=no-member
        response = request.execute(num_retries=self.num_retries)
        fetched.extend(response.get("items", []))
        # Stop once every reported result has been collected.
        if response["totalResults"] <= len(fetched):
            break
    return fetched
Example #16
Source File: display_video.py From airflow with Apache License 2.0 | 5 votes |
def get_conn_to_display_video(self) -> Resource:
    """Retrieves connection to DisplayVideo, building it on first use."""
    if not self._conn:
        authorized_http = self._authorize()
        self._conn = build(
            "displayvideo",
            self.api_version,
            http=authorized_http,
            cache_discovery=False,
        )
    return self._conn
Example #17
Source File: display_video.py From airflow with Apache License 2.0 | 5 votes |
def get_conn(self) -> Resource:
    """Retrieves connection to the DoubleClick Bid Manager API (cached)."""
    if not self._conn:
        self._conn = build(
            "doubleclickbidmanager",
            self.api_version,
            http=self._authorize(),
            cache_discovery=False,
        )
    return self._conn
Example #18
Source File: basefacade.py From ScoutSuite with GNU General Public License v2.0 | 5 votes |
def _get_client(self) -> discovery.Resource:
    """Return a discovery client by delegating to _build_client()."""
    return self._build_client()
Example #19
Source File: basefacade.py From ScoutSuite with GNU General Public License v2.0 | 5 votes |
def _build_client(self) -> discovery.Resource:
    """Build a discovery client for this facade's configured name and version."""
    return self._build_arbitrary_client(self._client_name, self._client_version)
Example #20
Source File: vep_runner.py From gcp-variant-transforms with Apache License 2.0 | 4 votes |
def __init__(
    self,
    pipeline_service,  # type: discovery.Resource
    species,  # type: str
    assembly,  # type: str
    input_pattern,  # type: str
    output_dir,  # type: str
    vep_info_field,  # type: str
    vep_image_uri,  # type: str
    vep_cache_path,  # type: str
    vep_num_fork,  # type: int
    pipeline_args,  # type: List[str]
    watchdog_file,  # type: Optional[str]
    watchdog_file_update_interval_seconds,  # type: int
):
    # type: (...) -> None
    """Constructs an instance for running VEP.

    Note that external users of this class can use create_runner_and_update_args
    function of this module to create an instance of this class from flags.

    Args:
        input_pattern: The pattern to identify all input files.
        output_dir: The location for all output files. This is expected not to
            exist and is created in the process of running VEP pipelines.
        vep_image_uri: The URI of the image that contains VEP.
        vep_cache_path: The URI for the cache file on GCS.
        vep_num_fork: The value of the --fork argument for running VEP.
        pipeline_args: The list of arguments that are meant to be used when
            running Beam; for simplicity we use the same arguments to decide how
            many and what type of workers to use, where to run, etc.
        watchdog_file: The file that will be updated by the Dataflow worker
            every `watchdog_file_update_interval_seconds`. Once the file is
            found to be stale, the VEP process running in the VM will be killed.
        watchdog_file_update_interval_seconds: The `watchdog_file` will be
            updated by the Dataflow worker every
            `watchdog_file_update_interval_seconds`.
    """
    self._pipeline_service = pipeline_service
    self._species = species
    self._assembly = assembly
    self._vep_image_uri = vep_image_uri
    # The cache path is normalized/derived before being stored.
    self._vep_cache_path = self._make_vep_cache_path(vep_cache_path)
    self._vep_num_fork = vep_num_fork
    self._input_pattern = input_pattern
    self._output_dir = output_dir
    self._vep_info_field = vep_info_field
    self._process_pipeline_args(pipeline_args)
    self._watchdog_file = watchdog_file
    self._watchdog_file_update_interval_seconds = (
        watchdog_file_update_interval_seconds)
    # Bookkeeping for operations started by this runner.
    self._running_operation_ids = []  # type: List[str]
    self._operation_name_to_io_infos = {}
    self._operation_name_to_logs = {}
Example #21
Source File: storage.py From django-cloud-deploy with Apache License 2.0 | 4 votes |
def create_bucket(self, project_id: str, bucket_name: str):
    """Create a Google Cloud Storage Bucket on the given project.

    Args:
        project_id: Id of the GCP project.
        bucket_name: Name of the bucket to create.

    Raises:
        CloudStorageError: When it fails to create the bucket.
    """
    bucket_body = {'name': bucket_name}
    request = self._storage_service.buckets().insert(project=project_id,
                                                     body=bucket_body)
    try:
        response = request.execute(num_retries=5)
        # When the api call succeed, the response is a Bucket Resource
        # object. See
        # https://cloud.google.com/storage/docs/json_api/v1/buckets#resource
        if 'name' not in response:
            raise CloudStorageError(
                'Unexpected response creating bucket "{}" in project "{}"'
                ': {}'.format(bucket_name, project_id, response))
    except errors.HttpError as e:
        if e.resp.status == 403:
            raise CloudStorageError(
                'You do not have permission to create bucket in project: '
                '"{}"'.format(project_id))
        elif e.resp.status == 409:
            # A bucket with the given name already exist. But we don't know
            # whether that bucket exist under our GCP project or it exist
            # under somebody else's GCP project.
            # We will reuse the bucket if it exists under our GCP project.
            if self._bucket_exist(project_id, bucket_name):
                return
            else:
                raise CloudStorageError(
                    'Bucket "{}" already exist. Name of the bucket should '
                    'be unique across the whole Google Cloud '
                    'Platform.'.format(bucket_name))
        else:
            raise CloudStorageError(
                'Unexpected error when creating bucket "{}" in project "{}"'
                .format(bucket_name, project_id)) from e