Python google.api_core.exceptions.AlreadyExists() Examples

The following are 30 code examples of google.api_core.exceptions.AlreadyExists(). You can go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module google.api_core.exceptions, or try the search function.
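Most of these examples follow the same get-or-create idiom: attempt to create a resource, and if the server raises AlreadyExists (the HTTP 409 / gRPC ALREADY_EXISTS conflict error), fall back to fetching the existing resource instead of failing. The short sketch below illustrates that idiom with Pub/Sub; the project and topic IDs are placeholders, and the positional create_topic/get_topic calls assume the older (pre-2.0) google-cloud-pubsub client used by most of the samples on this page.

from google.api_core.exceptions import AlreadyExists
from google.cloud import pubsub_v1

publisher = pubsub_v1.PublisherClient()
topic_path = publisher.topic_path('my-project', 'my-topic')  # placeholder IDs

try:
    # The server rejects duplicate names with AlreadyExists (HTTP 409 / gRPC ALREADY_EXISTS).
    topic = publisher.create_topic(topic_path)
except AlreadyExists:
    # The topic is already there; treat the call as idempotent and look it up instead.
    topic = publisher.get_topic(topic_path)

print(topic.name)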
Example #1
Source File: tasks.py    From airflow with Apache License 2.0
def execute(self, context):
        hook = CloudTasksHook(gcp_conn_id=self.gcp_conn_id)
        try:
            queue = hook.create_queue(
                location=self.location,
                task_queue=self.task_queue,
                project_id=self.project_id,
                queue_name=self.queue_name,
                retry=self.retry,
                timeout=self.timeout,
                metadata=self.metadata,
            )
        except AlreadyExists:
            queue = hook.get_queue(
                location=self.location,
                project_id=self.project_id,
                queue_name=self.queue_name,
                retry=self.retry,
                timeout=self.timeout,
                metadata=self.metadata,
            )

        return MessageToDict(queue) 
Example #2
Source File: manager.py    From python-docs-samples with Apache License 2.0
def open_registry(
        service_account_json, project_id, cloud_region, pubsub_topic,
        registry_id):
    """Gets or creates a device registry."""
    # project_id = 'YOUR_PROJECT_ID'
    # cloud_region = 'us-central1'
    # pubsub_topic = 'your-pubsub-topic'
    # registry_id = 'your-registry-id'
    print('Creating registry')

    try:
        response = create_registry(
            service_account_json, project_id, cloud_region,
            pubsub_topic, registry_id)
    except AlreadyExists:
        # Device registry already exists. We just re-use the existing one.
        print(
            'Registry {} already exists - looking it up instead.'.format(
                registry_id))
        response = get_registry(
            service_account_json, project_id, cloud_region,
            registry_id)

    print('Registry {} opened: '.format(response.name))
    print(response) 
Example #3
Source File: fixtures.py    From python-docs-samples with Apache License 2.0
def test_subscription(test_topic):
    subscriber = pubsub.SubscriberClient()
    subscription_path = subscriber.subscription_path(
        project_id, subscription_name)

    try:
        subscription = subscriber.create_subscription(
            subscription_path, test_topic.name)
    except AlreadyExists as e:
        print("The subscription already exists, detail: {}".format(str(e)))
        # Ignore the error, fetch the subscription
        subscription = subscriber.get_subscription(subscription_path)

    yield subscription

    try:
        subscriber.delete_subscription(subscription_path)
    except NotFound as e:
        # We ignore this case.
        print("The subscription doesn't exist: detail: {}".format(str(e))) 
Example #4
Source File: fixtures.py    From python-docs-samples with Apache License 2.0
def test_topic():
    pubsub_client = pubsub.PublisherClient()
    try:
        topic = manager.create_iot_topic(project_id, topic_id)
    except AlreadyExists as e:
        print("The topic already exists, detail: {}".format(str(e)))
        # Ignore the error, fetch the topic
        topic = pubsub_client.get_topic(
            pubsub_client.topic_path(project_id, topic_id))

    yield topic

    topic_path = pubsub_client.topic_path(project_id, topic_id)
    try:
        pubsub_client.delete_topic(topic_path)
    except NotFound as e:
        # We ignore this case.
        print("The topic doesn't exist: detail: {}".format(str(e))) 
Example #5
Source File: client.py    From rele with Apache License 2.0
def create_subscription(self, subscription, topic):
        """Handles creating the subscription when it does not exists.

        This makes it easier to deploy a worker and forget about the
        subscription side of things. The subscription must
        have a topic to subscribe to. Which means that the topic must be
        created manually before the worker is started.

        :param subscription: str Subscription name
        :param topic: str Topic name to subscribe
        """
        subscription_path = self._client.subscription_path(
            self._gc_project_id, subscription
        )
        topic_path = self._client.topic_path(self._gc_project_id, topic)

        with suppress(exceptions.AlreadyExists):
            try:
                self._client.create_subscription(
                    name=subscription_path,
                    topic=topic_path,
                    ack_deadline_seconds=self._ack_deadline,
                )
            except exceptions.NotFound:
                logger.error("Cannot subscribe to a topic that does not exist.") 
Example #6
Source File: vision.py    From airflow with Apache License 2.0
def execute(self, context):
        hook = CloudVisionHook(gcp_conn_id=self.gcp_conn_id)
        try:
            return hook.create_product(
                location=self.location,
                product=self.product,
                project_id=self.project_id,
                product_id=self.product_id,
                retry=self.retry,
                timeout=self.timeout,
                metadata=self.metadata,
            )
        except AlreadyExists:
            self.log.info(
                'Product with id %s already exists. Exiting from the create operation.', self.product_id
            )
            return self.product_id 
Example #7
Source File: vision.py    From airflow with Apache License 2.0
def execute(self, context):
        hook = CloudVisionHook(gcp_conn_id=self.gcp_conn_id)
        try:
            return hook.create_product_set(
                location=self.location,
                project_id=self.project_id,
                product_set=self.product_set,
                product_set_id=self.product_set_id,
                retry=self.retry,
                timeout=self.timeout,
                metadata=self.metadata,
            )
        except AlreadyExists:
            self.log.info(
                "Product set with id %s already exists. Exiting from the create operation.",
                self.product_set_id,
            )
            return self.product_set_id 
Example #8
Source File: dlp.py    From airflow with Apache License 2.0
def execute(self, context):
        hook = CloudDLPHook(gcp_conn_id=self.gcp_conn_id)
        try:
            info = hook.create_stored_info_type(
                organization_id=self.organization_id,
                project_id=self.project_id,
                config=self.config,
                stored_info_type_id=self.stored_info_type_id,
                retry=self.retry,
                timeout=self.timeout,
                metadata=self.metadata,
            )
        except AlreadyExists:
            info = hook.get_stored_info_type(
                organization_id=self.organization_id,
                project_id=self.project_id,
                stored_info_type_id=self.stored_info_type_id,
                retry=self.retry,
                timeout=self.timeout,
                metadata=self.metadata,
            )
        return MessageToDict(info) 
Example #9
Source File: dlp.py    From airflow with Apache License 2.0
def execute(self, context):
        hook = CloudDLPHook(gcp_conn_id=self.gcp_conn_id)
        try:
            trigger = hook.create_job_trigger(
                project_id=self.project_id,
                job_trigger=self.job_trigger,
                trigger_id=self.trigger_id,
                retry=self.retry,
                timeout=self.timeout,
                metadata=self.metadata,
            )
        except AlreadyExists:
            trigger = hook.get_job_trigger(
                project_id=self.project_id,
                job_trigger_id=self.trigger_id,
                retry=self.retry,
                timeout=self.timeout,
                metadata=self.metadata,
            )
        return MessageToDict(trigger) 
Example #10
Source File: dlp.py    From airflow with Apache License 2.0
def execute(self, context):
        hook = CloudDLPHook(gcp_conn_id=self.gcp_conn_id)
        try:
            template = hook.create_inspect_template(
                organization_id=self.organization_id,
                project_id=self.project_id,
                inspect_template=self.inspect_template,
                template_id=self.template_id,
                retry=self.retry,
                timeout=self.timeout,
                metadata=self.metadata,
            )
        except AlreadyExists:
            template = hook.get_inspect_template(
                organization_id=self.organization_id,
                project_id=self.project_id,
                template_id=self.template_id,
                retry=self.retry,
                timeout=self.timeout,
                metadata=self.metadata,
            )
        return MessageToDict(template) 
Example #11
Source File: dlp.py    From airflow with Apache License 2.0
def execute(self, context):
        hook = CloudDLPHook(gcp_conn_id=self.gcp_conn_id)
        try:
            template = hook.create_deidentify_template(
                organization_id=self.organization_id,
                project_id=self.project_id,
                deidentify_template=self.deidentify_template,
                template_id=self.template_id,
                retry=self.retry,
                timeout=self.timeout,
                metadata=self.metadata,
            )
        except AlreadyExists:
            template = hook.get_deidentify_template(
                organization_id=self.organization_id,
                project_id=self.project_id,
                template_id=self.template_id,
                retry=self.retry,
                timeout=self.timeout,
                metadata=self.metadata,
            )

        return MessageToDict(template) 
Example #12
Source File: test_vision.py    From airflow with Apache License 2.0
def test_already_exists(self, mock_hook):
        # Exception AlreadyExists not raised, caught in the operator's execute() - idempotence
        op = CloudVisionCreateReferenceImageOperator(
            location=LOCATION_TEST,
            product_id=PRODUCT_ID_TEST,
            reference_image=REFERENCE_IMAGE_TEST,
            task_id='id',
        )
        op.execute(context=None)
        mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
        mock_hook.return_value.create_reference_image.assert_called_once_with(
            location=LOCATION_TEST,
            product_id=PRODUCT_ID_TEST,
            reference_image=REFERENCE_IMAGE_TEST,
            reference_image_id=None,
            project_id=None,
            retry=None,
            timeout=None,
            metadata=None,
        ) 
Example #13
Source File: test_dataproc.py    From airflow with Apache License 2.0
def test_execute_if_cluster_exists(self, mock_hook):
        mock_hook.return_value.create_cluster.side_effect = [AlreadyExists("test")]
        op = DataprocCreateClusterOperator(
            task_id=TASK_ID,
            region=GCP_LOCATION,
            project_id=GCP_PROJECT,
            cluster=CLUSTER,
            gcp_conn_id=GCP_CONN_ID,
            retry=RETRY,
            timeout=TIMEOUT,
            metadata=METADATA,
            request_id=REQUEST_ID,
        )
        op.execute(context={})
        mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
        mock_hook.return_value.create_cluster.assert_called_once_with(
            region=GCP_LOCATION,
            project_id=GCP_PROJECT,
            cluster=CLUSTER,
            request_id=REQUEST_ID,
            retry=RETRY,
            timeout=TIMEOUT,
            metadata=METADATA,
        )
        mock_hook.return_value.get_cluster.assert_called_once_with(
            region=GCP_LOCATION,
            project_id=GCP_PROJECT,
            cluster_name=CLUSTER_NAME,
            retry=RETRY,
            timeout=TIMEOUT,
            metadata=METADATA,
        ) 
Example #14
Source File: manager.py    From python-docs-samples with Apache License 2.0
def create_registry(
        service_account_json, project_id, cloud_region, pubsub_topic,
        registry_id):
    """ Creates a registry and returns the result. Returns an empty result if
    the registry already exists."""
    # [START iot_create_registry]
    # project_id = 'YOUR_PROJECT_ID'
    # cloud_region = 'us-central1'
    # pubsub_topic = 'your-pubsub-topic'
    # registry_id = 'your-registry-id'
    client = iot_v1.DeviceManagerClient()
    parent = client.location_path(project_id, cloud_region)

    if not pubsub_topic.startswith('projects/'):
        pubsub_topic = 'projects/{}/topics/{}'.format(project_id, pubsub_topic)

    body = {
        'event_notification_configs': [{
            'pubsub_topic_name': pubsub_topic
        }],
        'id': registry_id
    }

    try:
        response = client.create_device_registry(parent, body)
        print('Created registry')
        return response
    except HttpError:
        print('Error, registry not created')
        raise
    except AlreadyExists:
        print('Error, registry already exists')
        raise
    # [END iot_create_registry] 
Example #15
Source File: test_vision.py    From airflow with Apache License 2.0
def test_already_exists(self, mock_hook):
        mock_hook.return_value.create_product_set.side_effect = AlreadyExists(message='')
        # Exception AlreadyExists not raised, caught in the operator's execute() - idempotence
        op = CloudVisionCreateProductSetOperator(
            location=LOCATION_TEST,
            product_set=PRODUCTSET_TEST,
            product_set_id=PRODUCTSET_ID_TEST,
            project_id='mock-project-id',
            task_id='id',
        )
        result = op.execute(None)
        self.assertEqual(PRODUCTSET_ID_TEST, result) 
Example #16
Source File: test_vision.py    From airflow with Apache License 2.0
def test_already_exists(self, mock_hook):
        mock_hook.return_value.create_product.side_effect = AlreadyExists(message='')
        # Exception AlreadyExists not raised, caught in the operator's execute() - idempotence
        op = CloudVisionCreateProductOperator(
            location=LOCATION_TEST,
            product=PRODUCT_TEST,
            product_id=PRODUCT_ID_TEST,
            project_id='mock-project-id',
            task_id='id',
        )
        result = op.execute(None)
        self.assertEqual(PRODUCT_ID_TEST, result) 
Example #17
Source File: samples.py    From python-docs-samples with Apache License 2.0
def create_occurrence_subscription(subscription_id, project_id):
    """Creates a new Pub/Sub subscription object listening to the
    Container Analysis Occurrences topic."""
    # subscription_id := 'my-occurrences-subscription'
    # project_id = 'my-gcp-project'

    from google.api_core.exceptions import AlreadyExists
    from google.cloud.pubsub import SubscriberClient

    topic_id = 'container-analysis-occurrences-v1'
    client = SubscriberClient()
    topic_name = client.topic_path(project_id, topic_id)
    subscription_name = client.subscription_path(project_id, subscription_id)
    success = True
    try:
        client.create_subscription(subscription_name, topic_name)
    except AlreadyExists:
        # If the subscription already exists, do nothing and report success.
        pass
    except Exception:
        # Any other error means the subscription could not be created.
        success = False
    return success
# [END containeranalysis_pubsub]


# [START containeranalysis_poll_discovery_occurrence_finished] 
Example #18
Source File: samples_test.py    From python-docs-samples with Apache License 2.0
def test_pubsub(self):
        # create topic if needed
        client = SubscriberClient()
        try:
            topic_id = 'container-analysis-occurrences-v1'
            topic_name = client.topic_path(PROJECT_ID, topic_id)
            publisher = PublisherClient()
            publisher.create_topic(topic_name)
        except AlreadyExists:
            pass

        subscription_id = 'container-analysis-test-{}'.format(uuid.uuid4())
        subscription_name = client.subscription_path(PROJECT_ID,
                                                     subscription_id)
        samples.create_occurrence_subscription(subscription_id, PROJECT_ID)

        # We cannot make this pass with multiple messages; the server may be
        # deduplicating them.
        message_count = 1
        try:
            job_done = threading.Event()
            receiver = MessageReceiver(message_count, job_done)
            client.subscribe(subscription_name, receiver.pubsub_callback)

            for i in range(message_count):
                occ = samples.create_occurrence(
                    self.image_url, self.note_id, PROJECT_ID, PROJECT_ID)
                time.sleep(SLEEP_TIME)
                samples.delete_occurrence(basename(occ.name), PROJECT_ID)
                time.sleep(SLEEP_TIME)
            # We saw occasional failures with a 60-second timeout, so we bumped it
            # to 180 seconds.
            # See also: python-docs-samples/issues/2894
            job_done.wait(timeout=180)
            print('done. msg_count = {}'.format(receiver.msg_count))
            assert message_count <= receiver.msg_count
        finally:
            # clean up
            client.delete_subscription(subscription_name) 
Example #19
Source File: pubsub.py    From platypush with MIT License
def send_message(self, topic: str, msg, **kwargs):
        """
        Sends a message to a topic

        :param topic: Topic/channel where the message will be delivered. You can either specify the full topic name in
            the format ``projects/<project_id>/topics/<topic_name>``, where ``<project_id>`` must be the ID of your
            Google Pub/Sub project, or just ``<topic_name>``, in which case the topic is assumed to live under the
            ``project_id`` of your service credentials.
        :param msg: Message to be sent. It can be a list, a dict, or a Message object.
        :param kwargs: Extra arguments to be passed to .publish()
        """
        # noinspection PyPackageRequirements
        from google.cloud import pubsub_v1
        # noinspection PyPackageRequirements
        from google.api_core.exceptions import AlreadyExists

        credentials = self.get_credentials(self.publisher_audience)
        publisher = pubsub_v1.PublisherClient(credentials=credentials)

        if not topic.startswith('projects/{}/topics/'.format(self.project_id)):
            topic = 'projects/{}/topics/{}'.format(self.project_id, topic)

        try:
            publisher.create_topic(topic)
        except AlreadyExists:
            pass

        if isinstance(msg, int) or isinstance(msg, float):
            msg = str(msg)
        if isinstance(msg, dict) or isinstance(msg, list):
            msg = json.dumps(msg)
        if isinstance(msg, str):
            msg = msg.encode()

        publisher.publish(topic, msg, **kwargs)


# vim:sw=4:ts=4:et: 
Example #20
Source File: pubsub.py    From platypush with MIT License
def run(self):
        # noinspection PyPackageRequirements
        from google.cloud import pubsub_v1
        # noinspection PyPackageRequirements
        from google.api_core.exceptions import AlreadyExists

        super().run()
        set_thread_name('GooglePubSub')
        plugin = self._get_plugin()
        project_id = plugin.get_project_id()
        credentials = plugin.get_credentials(plugin.subscriber_audience)
        subscriber = pubsub_v1.SubscriberClient(credentials=credentials)

        for topic in self.topics:
            if not topic.startswith('projects/{}/topics/'.format(project_id)):
                topic = 'projects/{}/topics/{}'.format(project_id, topic)
            subscription_name = '/'.join([*topic.split('/')[:2], 'subscriptions', topic.split('/')[-1]])

            try:
                subscriber.create_subscription(name=subscription_name, topic=topic)
            except AlreadyExists:
                pass

            subscriber.subscribe(subscription_name, self._message_callback(topic))

        self.wait_stop()


# vim:sw=4:ts=4:et: 
Example #21
Source File: pub_test.py    From python-pubsub with Apache License 2.0
def topic(publisher_client):
    topic_path = publisher_client.topic_path(PROJECT, TOPIC)

    try:
        publisher_client.create_topic(topic_path)
    except AlreadyExists:
        pass

    yield TOPIC

    publisher_client.delete_topic(topic_path) 
Example #22
Source File: sub_test.py    From python-pubsub with Apache License 2.0
def subscription_path(topic_path):
    subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION)

    try:
        subscription = subscriber_client.create_subscription(
            subscription_path, topic_path
        )
        yield subscription.name
    except AlreadyExists:
        yield subscription_path

    subscriber_client.delete_subscription(subscription_path)
    subscriber_client.close() 
Example #23
Source File: sub_test.py    From python-pubsub with Apache License 2.0
def topic_path():
    topic_path = publisher_client.topic_path(PROJECT, TOPIC)

    try:
        topic = publisher_client.create_topic(topic_path)
        yield topic.name
    except AlreadyExists:
        yield topic_path

    publisher_client.delete_topic(topic_path) 
Example #24
Source File: test_pubsub.py    From airflow with Apache License 2.0
def test_create_preexisting_topic_failifexists(self, mock_service):
        mock_service.return_value.create_topic.side_effect = AlreadyExists(
            'Topic already exists: %s' % TEST_TOPIC
        )
        with self.assertRaises(PubSubException) as e:
            self.pubsub_hook.create_topic(project_id=TEST_PROJECT, topic=TEST_TOPIC, fail_if_exists=True)
        self.assertEqual(str(e.exception), 'Topic already exists: %s' % TEST_TOPIC) 
Example #25
Source File: test_pubsub.py    From airflow with Apache License 2.0
def test_create_subscription_failifexists(self, mock_service):
        mock_service.create_subscription.side_effect = AlreadyExists(
            'Subscription already exists: %s' % EXPANDED_SUBSCRIPTION
        )
        with self.assertRaises(PubSubException) as e:
            self.pubsub_hook.create_subscription(
                project_id=TEST_PROJECT, topic=TEST_TOPIC, subscription=TEST_SUBSCRIPTION, fail_if_exists=True
            )
        self.assertEqual(str(e.exception), 'Subscription already exists: %s' % EXPANDED_SUBSCRIPTION) 
Example #26
Source File: test_system.py    From python-firestore with Apache License 2.0
def test_create_document(client, cleanup):
    now = datetime.datetime.utcnow().replace(tzinfo=UTC)
    collection_id = "doc-create" + UNIQUE_RESOURCE_ID
    document_id = "doc" + UNIQUE_RESOURCE_ID
    document = client.document(collection_id, document_id)
    # Add to clean-up before API request (in case ``create()`` fails).
    cleanup(document.delete)

    data = {
        "now": firestore.SERVER_TIMESTAMP,
        "eenta-ger": 11,
        "bites": b"\xe2\x98\x83 \xe2\x9b\xb5",
        "also": {"nestednow": firestore.SERVER_TIMESTAMP, "quarter": 0.25},
    }
    write_result = document.create(data)
    updated = _pb_timestamp_to_datetime(write_result.update_time)
    delta = updated - now
    # Allow a bit of clock skew, but make sure timestamps are close.
    assert -300.0 < delta.total_seconds() < 300.0

    with pytest.raises(AlreadyExists):
        document.create(data)

    # Verify the server times.
    snapshot = document.get()
    stored_data = snapshot.to_dict()
    server_now = stored_data["now"]

    delta = updated - server_now
    # NOTE: We could check the ``transform_results`` from the write result
    #       for the document transform, but this value gets dropped. Instead
    #       we make sure the timestamps are close.
    assert 0.0 <= delta.total_seconds() < 5.0
    expected_data = {
        "now": server_now,
        "eenta-ger": data["eenta-ger"],
        "bites": data["bites"],
        "also": {"nestednow": server_now, "quarter": data["also"]["quarter"]},
    }
    assert stored_data == expected_data 
Example #27
Source File: datacatalog.py    From airflow with Apache License 2.0
def execute(self, context: Dict):
        hook = CloudDataCatalogHook(gcp_conn_id=self.gcp_conn_id)
        try:
            result = hook.create_tag_template_field(
                location=self.location,
                tag_template=self.tag_template,
                tag_template_field_id=self.tag_template_field_id,
                tag_template_field=self.tag_template_field,
                project_id=self.project_id,
                retry=self.retry,
                timeout=self.timeout,
                metadata=self.metadata,
            )
        except AlreadyExists:
            self.log.info("Tag template field already exists. Skipping create operation.")
            tag_template = hook.get_tag_template(
                location=self.location,
                tag_template=self.tag_template,
                project_id=self.project_id,
                retry=self.retry,
                timeout=self.timeout,
                metadata=self.metadata,
            )
            result = tag_template.fields[self.tag_template_field_id]

        self.log.info("Current Tag ID: %s", self.tag_template_field_id)
        context["task_instance"].xcom_push(key="tag_template_field_id", value=self.tag_template_field_id)
        return MessageToDict(result) 
Example #28
Source File: datacatalog.py    From airflow with Apache License 2.0
def execute(self, context: Dict):
        hook = CloudDataCatalogHook(gcp_conn_id=self.gcp_conn_id)
        try:
            result = hook.create_tag_template(
                location=self.location,
                tag_template_id=self.tag_template_id,
                tag_template=self.tag_template,
                project_id=self.project_id,
                retry=self.retry,
                timeout=self.timeout,
                metadata=self.metadata,
            )
        except AlreadyExists:
            self.log.info("Tag Template already exists. Skipping create operation.")
            result = hook.get_tag_template(
                location=self.location,
                tag_template=self.tag_template_id,
                project_id=self.project_id,
                retry=self.retry,
                timeout=self.timeout,
                metadata=self.metadata,
            )
        _, _, tag_template = result.name.rpartition("/")
        self.log.info("Current Tag ID: %s", tag_template)
        context["task_instance"].xcom_push(key="tag_template_id", value=tag_template)
        return MessageToDict(result) 
Example #29
Source File: datacatalog.py    From airflow with Apache License 2.0
def execute(self, context: Dict):
        hook = CloudDataCatalogHook(gcp_conn_id=self.gcp_conn_id)
        try:
            result = hook.create_entry_group(
                location=self.location,
                entry_group_id=self.entry_group_id,
                entry_group=self.entry_group,
                project_id=self.project_id,
                retry=self.retry,
                timeout=self.timeout,
                metadata=self.metadata,
            )
        except AlreadyExists:
            self.log.info("Entry already exists. Skipping create operation.")
            result = hook.get_entry_group(
                location=self.location,
                entry_group=self.entry_group_id,
                project_id=self.project_id,
                retry=self.retry,
                timeout=self.timeout,
                metadata=self.metadata,
            )

        _, _, entry_group_id = result.name.rpartition("/")
        self.log.info("Current entry group ID: %s", entry_group_id)
        context["task_instance"].xcom_push(key="entry_group_id", value=entry_group_id)
        return MessageToDict(result) 
Example #30
Source File: datacatalog.py    From airflow with Apache License 2.0
def execute(self, context: Dict):
        hook = CloudDataCatalogHook(gcp_conn_id=self.gcp_conn_id)
        try:
            result = hook.create_entry(
                location=self.location,
                entry_group=self.entry_group,
                entry_id=self.entry_id,
                entry=self.entry,
                project_id=self.project_id,
                retry=self.retry,
                timeout=self.timeout,
                metadata=self.metadata,
            )
        except AlreadyExists:
            self.log.info("Entry already exists. Skipping create operation.")
            result = hook.get_entry(
                location=self.location,
                entry_group=self.entry_group,
                entry=self.entry_id,
                project_id=self.project_id,
                retry=self.retry,
                timeout=self.timeout,
                metadata=self.metadata,
            )
        _, _, entry_id = result.name.rpartition("/")
        self.log.info("Current entry_id ID: %s", entry_id)
        context["task_instance"].xcom_push(key="entry_id", value=entry_id)
        return MessageToDict(result)