Python airflow.exceptions.AirflowException() Examples

The following are 30 code examples of airflow.exceptions.AirflowException(), drawn from the Apache Airflow project; the source file for each example is noted above its code. Most examples are test methods excerpted from unittest test classes, so the decorators (for example @mock.patch) that supply their extra mock arguments are not shown here. You may also want to check out all available functions and classes of the airflow.exceptions module.
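All of these tests exercise the same basic pattern: code that detects a failure raises AirflowException, and the test asserts that the exception is raised. Here is a minimal, self-contained sketch of that pattern; the validate_conn_id helper is hypothetical and exists only for illustration.

import unittest

from airflow.exceptions import AirflowException


def validate_conn_id(conn_id):
    # Hypothetical helper: report a failure the way Airflow hooks and
    # operators do, by raising AirflowException.
    if not conn_id:
        raise AirflowException("conn_id must not be empty")
    return conn_id


class TestValidateConnId(unittest.TestCase):
    def test_empty_conn_id_raises(self):
        # The same assertRaisesRegex pattern used throughout the examples below.
        with self.assertRaisesRegex(AirflowException, "must not be empty"):
            validate_conn_id("")


if __name__ == "__main__":
    unittest.main()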
Example #1
Source File: test_databricks.py    From airflow with Apache License 2.0
def test_do_api_call_waits_between_retries(self, mock_sleep):
        retry_delay = 5
        self.hook = DatabricksHook(retry_delay=retry_delay)

        for exception in [requests_exceptions.ConnectionError,
                          requests_exceptions.SSLError,
                          requests_exceptions.Timeout,
                          requests_exceptions.ConnectTimeout,
                          requests_exceptions.HTTPError]:
            with mock.patch('airflow.providers.databricks.hooks.databricks.requests') as mock_requests:
                with mock.patch.object(self.hook.log, 'error'):
                    mock_sleep.reset_mock()
                    setup_mock_requests(mock_requests, exception)

                    with self.assertRaises(AirflowException):
                        self.hook._do_api_call(SUBMIT_RUN_ENDPOINT, {})

                    self.assertEqual(len(mock_sleep.mock_calls), self.hook.retry_limit - 1)
                    calls = [
                        mock.call(retry_delay),
                        mock.call(retry_delay)
                    ]
                    mock_sleep.assert_has_calls(calls) 
Example #2
Source File: test_http.py    From airflow with Apache License 2.0
def test_poke_exception(self, mock_session_send):
        """
        An exception raised in the poke function should not be ignored.
        """
        response = requests.Response()
        response.status_code = 200
        mock_session_send.return_value = response

        def resp_check(_):
            raise AirflowException('AirflowException raised here!')

        task = HttpSensor(
            task_id='http_sensor_poke_exception',
            http_conn_id='http_default',
            endpoint='',
            request_params={},
            response_check=resp_check,
            timeout=5,
            poke_interval=1)
        with self.assertRaisesRegex(AirflowException, 'AirflowException raised here!'):
            task.execute(context={}) 
Example #3
Source File: test_core.py    From airflow with Apache License 2.0
def test_on_failure_callback(self):
        # Annoying workaround for nonlocal not existing in python 2
        data = {'called': False}

        def check_failure(context, test_case=self):
            data['called'] = True
            error = context.get('exception')
            test_case.assertIsInstance(error, AirflowException)

        op = BashOperator(
            task_id='check_on_failure_callback',
            bash_command="exit 1",
            dag=self.dag,
            on_failure_callback=check_failure)
        self.assertRaises(
            AirflowException,
            op.run,
            start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
        self.assertTrue(data['called']) 
Example #4
Source File: logging_command_executor.py    From airflow with Apache License 2.0
def check_output(self, cmd):
        self.log.info("Executing for output: '%s'", " ".join(cmd))
        process = subprocess.Popen(args=cmd, stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE)
        output, err = process.communicate()
        retcode = process.poll()
        if retcode:
            self.log.error("Error when executing '%s'", " ".join(cmd))
            self.log.info("Stdout: %s", output)
            self.log.info("Stderr: %s", err)
            raise AirflowException("Retcode {} on {} with stdout: {}, stderr: {}".
                                   format(retcode, " ".join(cmd), output, err))
        return output 
Example #5
Source File: system_tests_class.py    From airflow with Apache License 2.0
def run_dag(self, dag_id: str, dag_folder: str = DEFAULT_DAG_FOLDER) -> None:
        """
        Runs an example DAG by its ID.

        :param dag_id: id of a DAG to be run
        :type dag_id: str
        :param dag_folder: directory where to look for the specific DAG. Relative to AIRFLOW_HOME.
        :type dag_folder: str
        """
        if os.environ.get("RUN_AIRFLOW_1_10") == "true":
            # For system test purposes we remap airflow/providers to the
            # site-packages path of the installed providers package
            python = f"python{sys.version_info.major}.{sys.version_info.minor}"
            dag_folder = dag_folder.replace(
                "/opt/airflow/airflow/providers",
                f"/usr/local/lib/{python}/site-packages/airflow/providers",
            )
        self.log.info("Looking for DAG: %s in %s", dag_id, dag_folder)
        dag_bag = DagBag(dag_folder=dag_folder, include_examples=False)
        dag = dag_bag.get_dag(dag_id)
        if dag is None:
            raise AirflowException(
                "The Dag {dag_id} could not be found. It's either an import problem,"
                "wrong dag_id or DAG is not in provided dag_folder."
                "The content of the {dag_folder} folder is {content}".format(
                    dag_id=dag_id,
                    dag_folder=dag_folder,
                    content=os.listdir(dag_folder),
                )
            )

        self.log.info("Attempting to run DAG: %s", dag_id)
        dag.clear(reset_dag_runs=True)
        try:
            dag.run(ignore_first_depends_on_past=True, verbose=True)
        except Exception:
            self._print_all_log_files()
            raise 
Example #6
Source File: test_decorators.py    From airflow with Apache License 2.0
def test_default_args(self):
        default_args = {'test_param': True}
        dummy_class = DummyClass(default_args=default_args)  # pylint: disable=no-value-for-parameter
        self.assertTrue(dummy_class.test_param)

        default_args = {'test_param': True, 'test_sub_param': True}
        dummy_subclass = DummySubClass(default_args=default_args)  # pylint: disable=no-value-for-parameter
        self.assertTrue(dummy_subclass.test_param)
        self.assertTrue(dummy_subclass.test_sub_param)

        default_args = {'test_param': True}
        dummy_subclass = DummySubClass(default_args=default_args, test_sub_param=True)
        self.assertTrue(dummy_subclass.test_param)
        self.assertTrue(dummy_subclass.test_sub_param)

        with self.assertRaisesRegex(AirflowException,
                                    'Argument.*test_sub_param.*required'):
            DummySubClass(default_args=default_args)  # pylint: disable=no-value-for-parameter 
Example #7
Source File: test_docker_swarm.py    From airflow with Apache License 2.0
def test_failed_service_raises_error(self, types_mock, client_class_mock):

        mock_obj = mock.Mock()

        client_mock = mock.Mock(spec=APIClient)
        client_mock.create_service.return_value = {'ID': 'some_id'}
        client_mock.images.return_value = []
        client_mock.pull.return_value = [b'{"status":"pull log"}']
        client_mock.tasks.return_value = [{'Status': {'State': 'failed'}}]
        types_mock.TaskTemplate.return_value = mock_obj
        types_mock.ContainerSpec.return_value = mock_obj
        types_mock.RestartPolicy.return_value = mock_obj
        types_mock.Resources.return_value = mock_obj

        client_class_mock.return_value = client_mock

        operator = DockerSwarmOperator(image='', auto_remove=False, task_id='unittest', enable_logging=False)
        msg = "Service failed: {'ID': 'some_id'}"
        with self.assertRaises(AirflowException) as error:
            operator.execute(None)
        self.assertEqual(str(error.exception), msg) 
Example #8
Source File: test_azure_container_instances.py    From airflow with Apache License 2.0
def test_execute_with_failures(self, aci_mock):
        expected_c_state = ContainerState(state='Terminated', exit_code=1, detail_status='test')
        expected_cg = make_mock_cg(expected_c_state)

        aci_mock.return_value.get_state.return_value = expected_cg
        aci_mock.return_value.exists.return_value = False

        aci = AzureContainerInstancesOperator(ci_conn_id=None,
                                              registry_conn_id=None,
                                              resource_group='resource-group',
                                              name='container-name',
                                              image='container-image',
                                              region='region',
                                              task_id='task')
        with self.assertRaises(AirflowException):
            aci.execute(None)

        self.assertEqual(aci_mock.return_value.delete.call_count, 1) 
Example #9
Source File: test_qubole_check.py    From airflow with Apache License 2.0
def test_execute_assertion_fail(self, mock_get_hook):

        mock_cmd = mock.Mock()
        mock_cmd.status = 'done'
        mock_cmd.id = 123
        mock_cmd.is_success = mock.Mock(
            return_value=HiveCommand.is_success(mock_cmd.status))

        mock_hook = mock.Mock()
        mock_hook.get_first.return_value = [11]
        mock_hook.cmd = mock_cmd
        mock_get_hook.return_value = mock_hook

        operator = self.__construct_operator('select value from tab1 limit 1;', 5, 1)

        with self.assertRaisesRegex(AirflowException,
                                    'Qubole Command Id: ' + str(mock_cmd.id)):
            operator.execute()

        mock_cmd.is_success.assert_called_once_with(mock_cmd.status) 
Example #10
Source File: test_qubole_check.py    From airflow with Apache License 2.0
def test_execute_assert_query_fail(self, mock_get_hook):

        mock_cmd = mock.Mock()
        mock_cmd.status = 'error'
        mock_cmd.id = 123
        mock_cmd.is_success = mock.Mock(
            return_value=HiveCommand.is_success(mock_cmd.status))

        mock_hook = mock.Mock()
        mock_hook.get_first.return_value = [11]
        mock_hook.cmd = mock_cmd
        mock_get_hook.return_value = mock_hook

        operator = self.__construct_operator('select value from tab1 limit 1;', 5, 1)

        with self.assertRaises(AirflowException) as cm:
            operator.execute()

        self.assertNotIn('Qubole Command Id: ', str(cm.exception))
        mock_cmd.is_success.assert_called_once_with(mock_cmd.status) 
Example #11
Source File: test_livy.py    From airflow with Apache License 2.0
def test_poll_for_termination_fail(self, mock_livy):

        state_list = 2 * [BatchState.RUNNING] + [BatchState.ERROR]

        def side_effect(_):
            if state_list:
                return state_list.pop(0)
            # fail if polling does not stop once the terminal state is reached
            raise AssertionError()

        mock_livy.side_effect = side_effect

        task = LivyOperator(
            file='sparkapp',
            polling_interval=1,
            dag=self.dag,
            task_id='livy_example'
        )
        task._livy_hook = task.get_hook()

        with self.assertRaises(AirflowException):
            task.poll_for_termination(BATCH_ID)

        mock_livy.assert_called_with(BATCH_ID)
        self.assertEqual(mock_livy.call_count, 3) 
Example #12
Source File: test_pinot.py    From airflow with Apache License 2.0
def test_run_cli_failure_status_code(self, mock_popen):
        mock_proc = mock.MagicMock()
        mock_proc.returncode = 1
        mock_proc.stdout = io.BytesIO(b'')
        mock_popen.return_value = mock_proc

        self.db_hook.pinot_admin_system_exit = True
        params = ["foo", "bar", "baz"]
        with self.assertRaises(AirflowException):
            self.db_hook.run_cli(params)
        params.insert(0, self.conn.extra_dejson.get('cmd_path'))
        env = os.environ.copy()
        env.update({"JAVA_OPTS": "-Dpinot.admin.system.exit=true "})
        mock_popen.assert_called_once_with(params,
                                           stderr=subprocess.STDOUT,
                                           stdout=subprocess.PIPE,
                                           close_fds=True,
                                           env=env) 
Example #13
Source File: test_spark_sql.py    From airflow with Apache License 2.0
def test_spark_process_runcmd_and_fail(self, mock_popen):
        # Given
        sql = 'SELECT 1'
        master = 'local'
        params = '--deploy-mode cluster'
        status = 1
        mock_popen.return_value.wait.return_value = status

        # When
        with self.assertRaises(AirflowException) as e:
            hook = SparkSqlHook(
                conn_id='spark_default',
                sql=sql,
                master=master,
            )
            hook.run_query(params)

        # Then
        self.assertEqual(
            str(e.exception),
            "Cannot execute '{}' on {} (additional parameters: '{}'). Process exit code: {}.".format(
                sql, master, params, status
            )
        ) 
Example #14
Source File: test_druid.py    From airflow with Apache License 2.0
def test_submit_gone_wrong(self, m):
        task_post = m.post(
            'http://druid-overlord:8081/druid/indexer/v1/task',
            text='{"task":"9f8a7359-77d4-4612-b0cd-cc2f6a3c28de"}'
        )
        status_check = m.get(
            'http://druid-overlord:8081/druid/indexer/v1/task/'
            '9f8a7359-77d4-4612-b0cd-cc2f6a3c28de/status',
            text='{"status":{"status": "FAILED"}}'
        )

        # The job failed for some reason
        with self.assertRaises(AirflowException):
            self.db_hook.submit_indexing_job('Long json file')

        self.assertTrue(task_post.called_once)
        self.assertTrue(status_check.called_once) 
Example #15
Source File: test_druid.py    From airflow with Apache License 2.0
def test_submit_unknown_response(self, m):
        task_post = m.post(
            'http://druid-overlord:8081/druid/indexer/v1/task',
            text='{"task":"9f8a7359-77d4-4612-b0cd-cc2f6a3c28de"}'
        )
        status_check = m.get(
            'http://druid-overlord:8081/druid/indexer/v1/task/'
            '9f8a7359-77d4-4612-b0cd-cc2f6a3c28de/status',
            text='{"status":{"status": "UNKNOWN"}}'
        )

        # An unknown error code
        with self.assertRaises(AirflowException):
            self.db_hook.submit_indexing_job('Long json file')

        self.assertTrue(task_post.called_once)
        self.assertTrue(status_check.called_once) 
Example #16
Source File: test_druid.py    From airflow with Apache License 2.0
def test_submit_timeout(self, m):
        self.db_hook.timeout = 1
        self.db_hook.max_ingestion_time = 5
        task_post = m.post(
            'http://druid-overlord:8081/druid/indexer/v1/task',
            text='{"task":"9f8a7359-77d4-4612-b0cd-cc2f6a3c28de"}'
        )
        status_check = m.get(
            'http://druid-overlord:8081/druid/indexer/v1/task/'
            '9f8a7359-77d4-4612-b0cd-cc2f6a3c28de/status',
            text='{"status":{"status": "RUNNING"}}'
        )
        shutdown_post = m.post(
            'http://druid-overlord:8081/druid/indexer/v1/task/'
            '9f8a7359-77d4-4612-b0cd-cc2f6a3c28de/shutdown',
            text='{"task":"9f8a7359-77d4-4612-b0cd-cc2f6a3c28de"}'
        )

        # Because the job keeps running
        with self.assertRaises(AirflowException):
            self.db_hook.submit_indexing_job('Long json file')

        self.assertTrue(task_post.called_once)
        self.assertTrue(status_check.called)
        self.assertTrue(shutdown_post.called_once) 
Example #17
Source File: test_livy.py    From airflow with Apache License 2.0
def test_get_batch_state_missing(self, mock):
        mock.register_uri(
            'GET', '//livy:8998/batches/{}/state'.format(BATCH_ID),
            json={},
            status_code=200
        )

        hook = LivyHook()
        with self.assertRaises(AirflowException):
            hook.get_batch_state(BATCH_ID) 
Example #18
Source File: test_livy.py    From airflow with Apache License 2.0
def test_post_batch_fail(self, mock):
        mock.register_uri(
            'POST', '//livy:8998/batches',
            json={},
            status_code=400,
            reason='ERROR'
        )

        hook = LivyHook()
        with self.assertRaises(AirflowException):
            hook.post_batch(file='sparkapp') 
Example #19
Source File: test_core.py    From airflow with Apache License 2.0
def test_illegal_args_forbidden(self):
        """
        Tests that operators raise exceptions on illegal arguments when
        illegal arguments are not allowed.
        """
        with self.assertRaises(AirflowException) as ctx:
            BashOperator(
                task_id='test_illegal_args',
                bash_command='echo success',
                dag=self.dag,
                illegal_argument_1234='hello?')
        self.assertIn(
            ('Invalid arguments were passed to BashOperator '
             '(task_id: test_illegal_args).'),
            str(ctx.exception)) 
Example #20
Source File: test_pig.py    From airflow with Apache License 2.0
def test_run_cli_fail(self, popen_mock):
        proc_mock = mock.MagicMock()
        proc_mock.returncode = 1
        proc_mock.stdout.readline.return_value = b''
        popen_mock.return_value = proc_mock

        hook = self.pig_hook()

        from airflow.exceptions import AirflowException
        self.assertRaises(AirflowException, hook.run_cli, "") 
Example #21
Source File: test_imap.py    From airflow with Apache License 2.0
def test_download_mail_attachments_with_regex_not_found(self, mock_imaplib, mock_open_method):
        _create_fake_imap(mock_imaplib, with_mail=True)

        with ImapHook() as imap_hook:
            self.assertRaises(AirflowException,
                              imap_hook.download_mail_attachments,
                              name=r'test_(\d+).csv',
                              local_output_directory='test_directory',
                              check_regex=True)

        mock_open_method.assert_not_called()
        mock_open_method.return_value.write.assert_not_called() 
Example #22
Source File: test_livy.py    From airflow with Apache License 2.0
def test_get_batch_fail(self, mock):
        mock.register_uri(
            'GET', '//livy:8998/batches/{}'.format(BATCH_ID),
            json={'msg': 'Unable to find batch'},
            status_code=404,
            reason='ERROR'
        )

        hook = LivyHook()
        with self.assertRaises(AirflowException):
            hook.get_batch(BATCH_ID) 
Example #23
Source File: test_hive_stats.py    From airflow with Apache License 2.0
def test_execute_no_query_results(self, mock_hive_metastore_hook, mock_presto_hook, mock_mysql_hook):
        mock_hive_metastore_hook.return_value.get_table.return_value.sd.cols = [fake_col]
        mock_mysql_hook.return_value.get_records.return_value = False
        mock_presto_hook.return_value.get_first.return_value = None

        self.assertRaises(AirflowException, HiveStatsCollectionOperator(**self.kwargs).execute, context={}) 
Example #24
Source File: test_hive.py    From airflow with Apache License 2.0
def test_get_max_partition_from_valid_part_specs_and_invalid_filter_map(self):
        with self.assertRaises(AirflowException):
            HiveMetastoreHook._get_max_partition_from_part_specs(
                [{'key1': 'value1', 'key2': 'value2'},
                 {'key1': 'value3', 'key2': 'value4'}],
                'key1',
                {'key3': 'value5'}) 
Example #25
Source File: test_hive.py    From airflow with Apache License 2.0
def test_get_max_partition_from_valid_part_specs_and_invalid_partition_key(self):
        with self.assertRaises(AirflowException):
            HiveMetastoreHook._get_max_partition_from_part_specs(
                [{'key1': 'value1', 'key2': 'value2'},
                 {'key1': 'value3', 'key2': 'value4'}],
                'key3',
                self.VALID_FILTER_MAP) 
Example #26
Source File: test_hive.py    From airflow with Apache License 2.0
def test_get_max_partition_from_valid_part_specs_and_none_partition_key(self):
        with self.assertRaises(AirflowException):
            HiveMetastoreHook._get_max_partition_from_part_specs(
                [{'key1': 'value1', 'key2': 'value2'},
                 {'key1': 'value3', 'key2': 'value4'}],
                None,
                self.VALID_FILTER_MAP) 
Example #27
Source File: test_local_filesystem.py    From airflow with Apache License 2.0
def test_missing_file(self, mock_exists):
        with self.assertRaisesRegex(
            AirflowException,
            re.escape("File a.json was not found. Check the configuration of your Secrets backend."),
        ):
            local_filesystem.load_connections("a.json") 
Example #28
Source File: test_local_filesystem.py    From airflow with Apache License 2.0
def test_env_file_invalid_input(self, file_content, expected_connection_uris):
        with mock_local_file(json.dumps(file_content)):
            with self.assertRaisesRegex(AirflowException, re.escape(expected_connection_uris)):
                local_filesystem.load_connections("a.json") 
Example #29
Source File: test_spark_submit.py    From airflow with Apache License 2.0
def test_resolve_spark_submit_env_vars_standalone_cluster_mode(self):

        def env_vars_exception_in_standalone_cluster_mode():
            # Given
            hook = SparkSubmitHook(conn_id='spark_standalone_cluster',
                                   env_vars={"bar": "foo"})

            # When
            hook._build_spark_submit_command(self._spark_job_file)

        # Then
        self.assertRaises(AirflowException,
                          env_vars_exception_in_standalone_cluster_mode) 
Example #30
Source File: test_livy.py    From airflow with Apache License 2.0
def test_delete_batch_fail(self, mock):
        mock.register_uri(
            'DELETE', '//livy:8998/batches/{}'.format(BATCH_ID),
            json={},
            status_code=400,
            reason='ERROR'
        )

        hook = LivyHook()
        with self.assertRaises(AirflowException):
            hook.delete_batch(BATCH_ID)