Python airflow.utils.state.State.SUCCESS Examples

The following are 30 code examples of airflow.utils.state.State.SUCCESS, drawn from the Apache Airflow test suite. The source file and license for each example are listed above it. You may also want to check out all available functions and classes of the airflow.utils.state module.
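Most of the examples below follow the same pattern: build or fetch a TaskInstance, drive it to completion (or mark it successful), and compare its state against State.SUCCESS. Here is a minimal, hypothetical sketch of that pattern, not one of the 30 examples; it assumes an initialized Airflow metadata database and an Airflow version, as in these tests, where TaskInstance still accepts an execution_date. The dag_id and task_id are placeholders, and the DummyOperator import path may differ between Airflow versions.

from airflow import models
from airflow.models import TaskInstance as TI
from airflow.operators.dummy_operator import DummyOperator
from airflow.utils import timezone
from airflow.utils.state import State

# Placeholder DAG and task used only for this sketch.
dag = models.DAG(dag_id='state_success_demo')
task = DummyOperator(
    task_id='state_success_demo_op',
    dag=dag,
    owner='airflow',
    start_date=timezone.datetime(2016, 2, 1))

# mark_success=True sets the state to SUCCESS without executing the task,
# mirroring test_run_pooling_task_with_mark_success below.
ti = TI(task=task, execution_date=timezone.utcnow())
ti.run(mark_success=True)
assert ti.state == State.SUCCESS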
Example #1
Source File: test_taskinstance.py    From airflow with Apache License 2.0
def test_previous_ti(self, _, schedule_interval, catchup) -> None:

        scenario = [State.SUCCESS, State.FAILED, State.SUCCESS]

        ti_list = self._test_previous_dates_setup(schedule_interval, catchup, scenario)

        self.assertIsNone(ti_list[0].get_previous_ti())

        self.assertEqual(
            ti_list[2].get_previous_ti().execution_date,
            ti_list[1].execution_date
        )

        self.assertNotEqual(
            ti_list[2].get_previous_ti().execution_date,
            ti_list[0].execution_date
        ) 
Example #2
Source File: test_dagrun.py    From airflow with Apache License 2.0
def test_depends_on_past(self, prev_ti_state, is_ti_success):
        dag_id = 'test_depends_on_past'

        dag = self.dagbag.get_dag(dag_id)
        task = dag.tasks[0]

        self.create_dag_run(dag, execution_date=timezone.datetime(2016, 1, 1, 0, 0, 0))
        self.create_dag_run(dag, execution_date=timezone.datetime(2016, 1, 2, 0, 0, 0))

        prev_ti = TI(task, timezone.datetime(2016, 1, 1, 0, 0, 0))
        ti = TI(task, timezone.datetime(2016, 1, 2, 0, 0, 0))

        prev_ti.set_state(prev_ti_state)
        ti.set_state(State.QUEUED)
        ti.run()
        self.assertEqual(ti.state == State.SUCCESS, is_ti_success) 
Example #3
Source File: test_task_command.py    From airflow with Apache License 2.0
def test_local_run(self):
        args = self.parser.parse_args([
            'tasks',
            'run',
            'example_python_operator',
            'print_the_context',
            '2018-04-27T08:39:51.298439+00:00',
            '--interactive',
            '--subdir',
            '/root/dags/example_python_operator.py'
        ])

        dag = get_dag(args.subdir, args.dag_id)
        reset(dag.dag_id)

        task_command.task_run(args)
        task = dag.get_task(task_id=args.task_id)
        ti = TaskInstance(task, args.execution_date)
        ti.refresh_from_db()
        state = ti.current_state()
        self.assertEqual(state, State.SUCCESS) 
Example #4
Source File: test_external_task_sensor.py    From airflow with Apache License 2.0
def test_external_dag_sensor(self):
        other_dag = DAG(
            'other_dag',
            default_args=self.args,
            end_date=DEFAULT_DATE,
            schedule_interval='@once')
        other_dag.create_dagrun(
            run_id='test',
            start_date=DEFAULT_DATE,
            execution_date=DEFAULT_DATE,
            state=State.SUCCESS)
        op = ExternalTaskSensor(
            task_id='test_external_dag_sensor_check',
            external_dag_id='other_dag',
            external_task_id=None,
            dag=self.dag
        )
        op.run(
            start_date=DEFAULT_DATE,
            end_date=DEFAULT_DATE,
            ignore_ti_state=True
        ) 
Example #5
Source File: test_prev_dagrun_dep.py    From airflow with Apache License 2.0
def test_not_depends_on_past(self):
        """
        If depends on past isn't set in the task then the previous dagrun should be
        ignored, even though there is no previous_ti which would normally fail the dep
        """
        task = self._get_task(depends_on_past=False,
                              start_date=datetime(2016, 1, 1),
                              wait_for_downstream=False)
        prev_ti = Mock(task=task, state=State.SUCCESS,
                       are_dependents_done=Mock(return_value=True),
                       execution_date=datetime(2016, 1, 2))
        ti = Mock(task=task, previous_ti=prev_ti,
                  execution_date=datetime(2016, 1, 3))
        dep_context = DepContext(ignore_depends_on_past=False)

        self.assertTrue(PrevDagrunDep().is_met(ti=ti, dep_context=dep_context)) 
Example #6
Source File: test_task_command.py    From airflow with Apache License 2.0
def test_logging_with_run_task_subprocess(self):
        # We are not using self.assertLogs as we want to verify what actually is stored in the Log file
        # as that is what gets displayed
        with conf_vars({('core', 'dags_folder'): os.path.join(ROOT_FOLDER, f"tests/dags/{self.dag_id}")}):
            task_command.task_run(self.parser.parse_args([
                'tasks', 'run', self.dag_id, self.task_id, '--local', self.execution_date_str]))

        with open(self.ti_log_file_path) as l_file:
            logs = l_file.read()

        print(logs)     # In case of a test failure this line shows the detailed log
        logs_list = logs.splitlines()

        self.assertIn(f"Subtask {self.task_id}", logs)
        self.assertIn("base_task_runner.py", logs)
        self.assert_log_line("Log from DAG Logger", logs_list)
        self.assert_log_line("Log from TI Logger", logs_list)
        self.assert_log_line("Log from Print statement", logs_list, expect_from_logging_mixin=True)

        self.assertIn(f"INFO - Running: ['airflow', 'tasks', 'run', '{self.dag_id}', "
                      f"'{self.task_id}', '{self.execution_date_str}',", logs)
        self.assertIn(f"INFO - Marking task as SUCCESS.dag_id={self.dag_id}, "
                      f"task_id={self.task_id}, execution_date=20170101T000000", logs) 
Example #7
Source File: test_base_executor.py    From airflow with Apache License 2.0
def test_get_event_buffer(self):
        executor = BaseExecutor()

        date = datetime.utcnow()
        try_number = 1
        key1 = ("my_dag1", "my_task1", date, try_number)
        key2 = ("my_dag2", "my_task1", date, try_number)
        key3 = ("my_dag2", "my_task2", date, try_number)
        state = State.SUCCESS
        executor.event_buffer[key1] = state, None
        executor.event_buffer[key2] = state, None
        executor.event_buffer[key3] = state, None

        self.assertEqual(len(executor.get_event_buffer(("my_dag1",))), 1)
        self.assertEqual(len(executor.get_event_buffer()), 2)
        self.assertEqual(len(executor.event_buffer), 0) 
Example #8
Source File: test_mark_tasks.py    From airflow with Apache License 2.0
def test_mark_tasks_past(self):
        # set one task to success towards end of scheduled dag runs
        snapshot = TestMarkTasks.snapshot_state(self.dag1, self.execution_dates)
        task = self.dag1.get_task("runme_1")
        altered = set_state(tasks=[task], execution_date=self.execution_dates[1],
                            upstream=False, downstream=False, future=False,
                            past=True, state=State.SUCCESS, commit=True)
        self.assertEqual(len(altered), 2)
        self.verify_state(self.dag1, [task.task_id], self.execution_dates, State.SUCCESS, snapshot)

        snapshot = TestMarkTasks.snapshot_state(self.dag3, self.dag3_execution_dates)
        task = self.dag3.get_task("run_this")
        altered = set_state(tasks=[task], execution_date=self.dag3_execution_dates[1],
                            upstream=False, downstream=False, future=False,
                            past=True, state=State.FAILED, commit=True)
        self.assertEqual(len(altered), 2)
        self.verify_state(self.dag3, [task.task_id], self.dag3_execution_dates[:2], State.FAILED, snapshot)
        self.verify_state(self.dag3, [task.task_id], [self.dag3_execution_dates[2]], None, snapshot) 
Example #9
Source File: test_mark_tasks.py    From airflow with Apache License 2.0
def test_mark_tasks_future(self):
        # set one task to success towards end of scheduled dag runs
        snapshot = TestMarkTasks.snapshot_state(self.dag1, self.execution_dates)
        task = self.dag1.get_task("runme_1")
        altered = set_state(tasks=[task], execution_date=self.execution_dates[0],
                            upstream=False, downstream=False, future=True,
                            past=False, state=State.SUCCESS, commit=True)
        self.assertEqual(len(altered), 2)
        self.verify_state(self.dag1, [task.task_id], self.execution_dates, State.SUCCESS, snapshot)

        snapshot = TestMarkTasks.snapshot_state(self.dag3, self.dag3_execution_dates)
        task = self.dag3.get_task("run_this")
        altered = set_state(tasks=[task], execution_date=self.dag3_execution_dates[1],
                            upstream=False, downstream=False, future=True,
                            past=False, state=State.FAILED, commit=True)
        self.assertEqual(len(altered), 2)
        self.verify_state(self.dag3, [task.task_id], [self.dag3_execution_dates[0]], None, snapshot)
        self.verify_state(self.dag3, [task.task_id], self.dag3_execution_dates[1:], State.FAILED, snapshot) 
Example #10
Source File: test_celery_executor.py    From airflow with Apache License 2.0
def test_should_support_kv_backend(self, mock_mget):
        with _prepare_app():
            mock_backend = BaseKeyValueStoreBackend(app=celery_executor.app)
            with mock.patch.object(celery_executor.app, 'backend', mock_backend):
                fetcher = BulkStateFetcher()
                result = fetcher.get_many([
                    mock.MagicMock(task_id="123"),
                    mock.MagicMock(task_id="456"),
                ])

        # Assert called - ignore order
        mget_args, _ = mock_mget.call_args
        self.assertEqual(set(mget_args[0]), {b'celery-task-meta-456', b'celery-task-meta-123'})
        mock_mget.assert_called_once_with(mock.ANY)

        self.assertEqual(result, {'123': ('SUCCESS', None), '456': ("PENDING", None)}) 
Example #11
Source File: test_celery_executor.py    From airflow with Apache License 2.0
def test_should_support_db_backend(self, mock_session):
        with _prepare_app():
            mock_backend = DatabaseBackend(app=celery_executor.app, url="sqlite3://")

            with mock.patch.object(celery_executor.app, 'backend', mock_backend):
                mock_session = mock_backend.ResultSession.return_value  # pylint: disable=no-member
                mock_session.query.return_value.filter.return_value.all.return_value = [
                    mock.MagicMock(**{"to_dict.return_value": {"status": "SUCCESS", "task_id": "123"}})
                ]

                fetcher = BulkStateFetcher()
                result = fetcher.get_many([
                    mock.MagicMock(task_id="123"),
                    mock.MagicMock(task_id="456"),
                ])

        self.assertEqual(result, {'123': ('SUCCESS', None), '456': ("PENDING", None)}) 
Example #12
Source File: test_mark_tasks.py    From airflow with Apache License 2.0
def setUp(self):
        clear_db_runs()
        drs = _create_dagruns(self.dag1, self.execution_dates,
                              state=State.RUNNING,
                              run_type=DagRunType.SCHEDULED)
        for dr in drs:
            dr.dag = self.dag1
            dr.verify_integrity()

        drs = _create_dagruns(self.dag2,
                              [self.dag2.default_args['start_date']],
                              state=State.RUNNING,
                              run_type=DagRunType.SCHEDULED)

        for dr in drs:
            dr.dag = self.dag2
            dr.verify_integrity()

        drs = _create_dagruns(self.dag3,
                              self.dag3_execution_dates,
                              state=State.SUCCESS,
                              run_type=DagRunType.MANUAL)
        for dr in drs:
            dr.dag = self.dag3
            dr.verify_integrity() 
Example #13
Source File: test_dagrun.py    From airflow with Apache License 2.0
def test_wait_for_downstream(self, prev_ti_state, is_ti_success):
        dag_id = 'test_wait_for_downstream'
        dag = self.dagbag.get_dag(dag_id)
        upstream, downstream = dag.tasks

        # For ti.set_state() to work, the DagRun has to exist;
        # otherwise ti.previous_ti returns an unpersisted TI
        self.create_dag_run(dag, execution_date=timezone.datetime(2016, 1, 1, 0, 0, 0))
        self.create_dag_run(dag, execution_date=timezone.datetime(2016, 1, 2, 0, 0, 0))

        prev_ti_downstream = TI(task=downstream, execution_date=timezone.datetime(2016, 1, 1, 0, 0, 0))
        ti = TI(task=upstream, execution_date=timezone.datetime(2016, 1, 2, 0, 0, 0))
        prev_ti = ti.get_previous_ti()
        prev_ti.set_state(State.SUCCESS)
        self.assertEqual(prev_ti.state, State.SUCCESS)

        prev_ti_downstream.set_state(prev_ti_state)
        ti.set_state(State.QUEUED)
        ti.run()
        self.assertEqual(ti.state == State.SUCCESS, is_ti_success) 
Example #14
Source File: test_mark_tasks.py    From airflow with Apache License 2.0
def test_mark_tasks_subdag(self):
        # set one task to success towards end of scheduled dag runs
        task = self.dag2.get_task("section-1")
        relatives = task.get_flat_relatives(upstream=False)
        task_ids = [t.task_id for t in relatives]
        task_ids.append(task.task_id)

        altered = set_state(tasks=[task], execution_date=self.execution_dates[0],
                            upstream=False, downstream=True, future=False,
                            past=False, state=State.SUCCESS, commit=True)
        self.assertEqual(len(altered), 14)

        # cannot use snapshot here as that will require drilling down the
        # sub dag tree, essentially recreating the same code as in the
        # tested logic.
        self.verify_state(self.dag2, task_ids, [self.execution_dates[0]],
                          State.SUCCESS, []) 
Example #15
Source File: test_subdag_operator.py    From airflow with Apache License 2.0
def test_execute_skip_if_dagrun_success(self):
        """
        When there is an existing DagRun in SUCCESS state, skip the execution.
        """
        dag = DAG('parent', default_args=default_args)
        subdag = DAG('parent.test', default_args=default_args)

        subdag.create_dagrun = Mock()
        subdag_task = SubDagOperator(task_id='test', subdag=subdag, dag=dag, poke_interval=1)
        subdag_task._get_dagrun = Mock()
        subdag_task._get_dagrun.return_value = self.dag_run_success

        subdag_task.pre_execute(context={'execution_date': DEFAULT_DATE})
        subdag_task.execute(context={'execution_date': DEFAULT_DATE})
        subdag_task.post_execute(context={'execution_date': DEFAULT_DATE})

        subdag.create_dagrun.assert_not_called()
        self.assertEqual(3, len(subdag_task._get_dagrun.mock_calls)) 
Example #16
Source File: test_prev_dagrun_dep.py    From airflow with Apache License 2.0
def test_context_ignore_depends_on_past(self):
        """
        If the context overrides depends_on_past then the dep should be met,
        even though there is no previous_ti which would normally fail the dep
        """
        task = self._get_task(depends_on_past=True,
                              start_date=datetime(2016, 1, 1),
                              wait_for_downstream=False)
        prev_ti = Mock(task=task, state=State.SUCCESS,
                       are_dependents_done=Mock(return_value=True),
                       execution_date=datetime(2016, 1, 2))
        ti = Mock(task=task, previous_ti=prev_ti,
                  execution_date=datetime(2016, 1, 3))
        dep_context = DepContext(ignore_depends_on_past=True)

        self.assertTrue(PrevDagrunDep().is_met(ti=ti, dep_context=dep_context)) 
Example #17
Source File: test_prev_dagrun_dep.py    From airflow with Apache License 2.0
def test_failed_wait_for_downstream(self):
        """
        If the previous TI is set to wait for the downstream tasks of the
        previous dagrun, then this dep should fail if the downstream TIs of
        the previous TI are not done.
        """
        task = self._get_task(depends_on_past=True,
                              start_date=datetime(2016, 1, 1),
                              wait_for_downstream=True)
        prev_ti = Mock(state=State.SUCCESS,
                       are_dependents_done=Mock(return_value=False))
        ti = Mock(task=task, previous_ti=prev_ti,
                  execution_date=datetime(2016, 1, 2))
        dep_context = DepContext(ignore_depends_on_past=False)

        self.assertFalse(PrevDagrunDep().is_met(ti=ti, dep_context=dep_context)) 
Example #18
Source File: test_dot_renderer.py    From airflow with Apache License 2.0
def test_should_render_dag_with_task_instances(self):
        dag = DAG(dag_id="DAG_ID")
        task_1 = BashOperator(dag=dag, start_date=START_DATE, task_id="first", bash_command="echo 1")
        task_2 = BashOperator(dag=dag, start_date=START_DATE, task_id="second", bash_command="echo 1")
        task_3 = PythonOperator(
            dag=dag, start_date=START_DATE, task_id="third", python_callable=mock.MagicMock()
        )
        task_1 >> task_2
        task_1 >> task_3
        tis = [
            TaskInstance(task_1, execution_date=START_DATE, state=State.SCHEDULED),
            TaskInstance(task_2, execution_date=START_DATE, state=State.SUCCESS),
            TaskInstance(task_3, execution_date=START_DATE, state=State.RUNNING),
        ]
        dot = dot_renderer.render_dag(dag, tis=tis)
        source = dot.source
        # Should render DAG title
        self.assertIn("label=DAG_ID", source)
        self.assertIn('first [color=black fillcolor=tan shape=rectangle style="filled,rounded"]', source)
        self.assertIn('second [color=white fillcolor=green shape=rectangle style="filled,rounded"]', source)
        self.assertIn('third [color=black fillcolor=lime shape=rectangle style="filled,rounded"]', source) 
Example #19
Source File: test_taskinstance.py    From airflow with Apache License 2.0
def test_previous_start_date_success(self, _, schedule_interval, catchup) -> None:

        scenario = [State.FAILED, State.SUCCESS, State.FAILED, State.SUCCESS]

        ti_list = self._test_previous_dates_setup(schedule_interval, catchup, scenario)

        self.assertIsNone(ti_list[0].get_previous_start_date(state=State.SUCCESS))
        self.assertIsNone(ti_list[1].get_previous_start_date(state=State.SUCCESS))
        self.assertEqual(
            ti_list[3].get_previous_start_date(state=State.SUCCESS),
            ti_list[1].start_date,
        )
        self.assertNotEqual(
            ti_list[3].get_previous_start_date(state=State.SUCCESS),
            ti_list[2].start_date,
        ) 
Example #20
Source File: test_taskinstance.py    From airflow with Apache License 2.0
def test_previous_execution_date_success(self, _, schedule_interval, catchup) -> None:

        scenario = [State.FAILED, State.SUCCESS, State.FAILED, State.SUCCESS]

        ti_list = self._test_previous_dates_setup(schedule_interval, catchup, scenario)

        self.assertIsNone(ti_list[0].get_previous_execution_date(state=State.SUCCESS))
        self.assertIsNone(ti_list[1].get_previous_execution_date(state=State.SUCCESS))
        self.assertEqual(
            ti_list[3].get_previous_execution_date(state=State.SUCCESS),
            ti_list[1].execution_date
        )
        self.assertNotEqual(
            ti_list[3].get_previous_execution_date(state=State.SUCCESS),
            ti_list[2].execution_date
        ) 
Example #21
Source File: test_kubernetes_pod.py    From airflow with Apache License 2.0
def test_image_pull_secrets_correctly_set(self, mock_client, monitor_mock, start_mock):
        from airflow.utils.state import State

        fake_pull_secrets = "fakeSecret"
        k = KubernetesPodOperator(
            namespace='default',
            image="ubuntu:16.04",
            cmds=["bash", "-cx"],
            arguments=["echo 10"],
            labels={"foo": "bar"},
            name="test",
            task_id="task",
            in_cluster=False,
            do_xcom_push=False,
            image_pull_secrets=fake_pull_secrets,
            cluster_context='default',
        )
        monitor_mock.return_value = (State.SUCCESS, None)
        context = self.create_context(k)
        k.execute(context=context)
        self.assertEqual(
            start_mock.call_args[0][0].spec.image_pull_secrets,
            [k8s.V1LocalObjectReference(name=fake_pull_secrets)]
        ) 
Example #22
Source File: test_branch_operator.py    From airflow with Apache License 2.0
def test_with_skip_in_branch_downstream_dependencies(self):
        self.branch_op = ChooseBranchOne(task_id="make_choice", dag=self.dag)
        self.branch_op >> self.branch_1 >> self.branch_2
        self.branch_op >> self.branch_2
        self.dag.clear()

        dagrun = self.dag.create_dagrun(
            run_type=DagRunType.MANUAL,
            start_date=timezone.utcnow(),
            execution_date=DEFAULT_DATE,
            state=State.RUNNING
        )

        self.branch_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)

        tis = dagrun.get_task_instances()
        for ti in tis:
            if ti.task_id == 'make_choice':
                self.assertEqual(ti.state, State.SUCCESS)
            elif ti.task_id == 'branch_1':
                self.assertEqual(ti.state, State.NONE)
            elif ti.task_id == 'branch_2':
                self.assertEqual(ti.state, State.NONE)
            else:
                raise Exception 
Example #23
Source File: test_taskinstance.py    From airflow with Apache License 2.0
def test_mark_non_runnable_task_as_success(self):
        """
        Test that running a task with the mark_success param updates the task state
        to SUCCESS without running the task, even though it fails dependency checks.
        """
        non_runnable_state = (
            set(State.task_states) - RUNNABLE_STATES - set(State.SUCCESS)).pop()
        dag = models.DAG(dag_id='test_mark_non_runnable_task_as_success')
        task = DummyOperator(
            task_id='test_mark_non_runnable_task_as_success_op',
            dag=dag,
            pool='test_pool',
            owner='airflow',
            start_date=timezone.datetime(2016, 2, 1, 0, 0, 0))
        ti = TI(
            task=task, execution_date=timezone.utcnow(), state=non_runnable_state)
        # TI.run() will sync from DB before validating deps.
        with create_session() as session:
            session.add(ti)
            session.commit()
        ti.run(mark_success=True)
        self.assertEqual(ti.state, State.SUCCESS) 
Example #24
Source File: test_branch_operator.py    From airflow with Apache License 2.0
def test_with_dag_run(self):
        self.branch_op = ChooseBranchOne(task_id="make_choice", dag=self.dag)
        self.branch_1.set_upstream(self.branch_op)
        self.branch_2.set_upstream(self.branch_op)
        self.dag.clear()

        dagrun = self.dag.create_dagrun(
            run_type=DagRunType.MANUAL,
            start_date=timezone.utcnow(),
            execution_date=DEFAULT_DATE,
            state=State.RUNNING
        )

        self.branch_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)

        tis = dagrun.get_task_instances()
        for ti in tis:
            if ti.task_id == 'make_choice':
                self.assertEqual(ti.state, State.SUCCESS)
            elif ti.task_id == 'branch_1':
                self.assertEqual(ti.state, State.NONE)
            elif ti.task_id == 'branch_2':
                self.assertEqual(ti.state, State.SKIPPED)
            else:
                raise Exception 
Example #25
Source File: test_taskinstance.py    From airflow with Apache License 2.0
def test_run_pooling_task_with_mark_success(self):
        """
        Test that running a task in an existing pool with the mark_success param
        updates the task state to SUCCESS without running the task, even though
        it fails dependency checks.
        """
        dag = models.DAG(dag_id='test_run_pooling_task_with_mark_success')
        task = DummyOperator(
            task_id='test_run_pooling_task_with_mark_success_op',
            dag=dag,
            pool='test_pool',
            owner='airflow',
            start_date=timezone.datetime(2016, 2, 1, 0, 0, 0))
        ti = TI(
            task=task, execution_date=timezone.utcnow())
        ti.run(mark_success=True)
        self.assertEqual(ti.state, State.SUCCESS) 
Example #26
Source File: test_branch_operator.py    From airflow with Apache License 2.0
def test_without_dag_run(self):
        """This checks the defensive against non existent tasks in a dag run"""
        self.branch_op = ChooseBranchOne(task_id="make_choice", dag=self.dag)
        self.branch_1.set_upstream(self.branch_op)
        self.branch_2.set_upstream(self.branch_op)
        self.dag.clear()

        self.branch_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)

        with create_session() as session:
            tis = session.query(TI).filter(
                TI.dag_id == self.dag.dag_id,
                TI.execution_date == DEFAULT_DATE
            )

            for ti in tis:
                if ti.task_id == 'make_choice':
                    self.assertEqual(ti.state, State.SUCCESS)
                elif ti.task_id == 'branch_1':
                    # should exist with state None
                    self.assertEqual(ti.state, State.NONE)
                elif ti.task_id == 'branch_2':
                    self.assertEqual(ti.state, State.SKIPPED)
                else:
                    raise Exception 
Example #27
Source File: test_taskinstance.py    From airflow with Apache License 2.0
def test_previous_ti_success(self, _, schedule_interval, catchup) -> None:

        scenario = [State.FAILED, State.SUCCESS, State.FAILED, State.SUCCESS]

        ti_list = self._test_previous_dates_setup(schedule_interval, catchup, scenario)

        self.assertIsNone(ti_list[0].get_previous_ti(state=State.SUCCESS))
        self.assertIsNone(ti_list[1].get_previous_ti(state=State.SUCCESS))

        self.assertEqual(
            ti_list[3].get_previous_ti(state=State.SUCCESS).execution_date,
            ti_list[1].execution_date
        )

        self.assertNotEqual(
            ti_list[3].get_previous_ti(state=State.SUCCESS).execution_date,
            ti_list[2].execution_date
        ) 
Example #28
Source File: test_prev_dagrun_dep.py    From airflow with Apache License 2.0
def test_all_met(self):
        """
        Test to make sure all of the conditions for the dep are met
        """
        task = self._get_task(depends_on_past=True,
                              start_date=datetime(2016, 1, 1),
                              wait_for_downstream=True)
        prev_ti = Mock(state=State.SUCCESS,
                       are_dependents_done=Mock(return_value=True))
        ti = Mock(
            task=task,
            execution_date=datetime(2016, 1, 2),
            **{'get_previous_ti.return_value': prev_ti}
        )
        dep_context = DepContext(ignore_depends_on_past=False)

        self.assertTrue(PrevDagrunDep().is_met(ti=ti, dep_context=dep_context)) 
Example #29
Source File: test_mark_tasks.py    From airflow with Apache License 2.0
def test_set_state_without_commit(self):
        date = self.execution_dates[0]
        dr = self._create_test_dag_run(State.RUNNING, date)
        self._set_default_task_instance_states(dr)

        will_be_altered = set_dag_run_state_to_running(self.dag1, date, commit=False)

        # None of the tasks will be altered.
        self.assertEqual(len(will_be_altered), 0)
        self._verify_dag_run_state(self.dag1, date, State.RUNNING)
        self._verify_task_instance_states_remain_default(dr)

        will_be_altered = set_dag_run_state_to_failed(self.dag1, date, commit=False)

        # Only the running task will be altered.
        self.assertEqual(len(will_be_altered), 1)
        self._verify_dag_run_state(self.dag1, date, State.RUNNING)
        self._verify_task_instance_states_remain_default(dr)

        will_be_altered = set_dag_run_state_to_success(self.dag1, date, commit=False)

        # All except the SUCCESS task should be altered.
        self.assertEqual(len(will_be_altered), 5)
        self._verify_dag_run_state(self.dag1, date, State.RUNNING)
        self._verify_task_instance_states_remain_default(dr) 
Example #30
Source File: test_valid_state_dep.py    From airflow with Apache License 2.0
def test_no_valid_states(self):
        """
        If there are no valid states the dependency should throw
        """
        ti = Mock(state=State.SUCCESS, end_date=datetime(2016, 1, 1))
        with self.assertRaises(AirflowException):
            ValidStateDep({}).is_met(ti=ti)