Python airflow.utils.state.State.RUNNING Examples
The following are 30 code examples of airflow.utils.state.State.RUNNING.
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions/classes of the module
airflow.utils.state.State, or try the search function.
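Before the examples, here is a minimal sketch of the pattern most of them share: State.RUNNING is a plain string constant used to mark a DAG run or task instance as running, either by passing it when creating a DagRun or by assigning it to TaskInstance.state and persisting the change. The snippet assumes an Airflow installation of roughly the same vintage as the tests below (1.10/2.0-era API) with an initialized metadata database; the DAG id, task id, and execution date are made-up placeholders, not taken from the examples.

from airflow import DAG, settings
from airflow.models import TaskInstance
from airflow.operators.dummy_operator import DummyOperator
from airflow.utils import timezone
from airflow.utils.state import State

EXECUTION_DATE = timezone.datetime(2020, 1, 1)

dag = DAG(dag_id='state_running_sketch', start_date=EXECUTION_DATE)
op = DummyOperator(task_id='dummy', dag=dag)

# Create a DAG run that starts out in the RUNNING state.
dag_run = dag.create_dagrun(
    run_id='manual__' + EXECUTION_DATE.isoformat(),
    execution_date=EXECUTION_DATE,
    start_date=timezone.utcnow(),
    state=State.RUNNING,
    external_trigger=True,
)

# Mark a task instance as running and persist it, as many of the tests below do.
ti = TaskInstance(task=op, execution_date=EXECUTION_DATE)
ti.state = State.RUNNING
session = settings.Session()
session.merge(ti)
session.commit()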
Example #1
Source File: test_pool.py From airflow with Apache License 2.0 | 7 votes |
def test_infinite_slots(self):
    pool = Pool(pool='test_pool', slots=-1)
    dag = DAG(
        dag_id='test_infinite_slots',
        start_date=DEFAULT_DATE,
    )
    op1 = DummyOperator(task_id='dummy1', dag=dag, pool='test_pool')
    op2 = DummyOperator(task_id='dummy2', dag=dag, pool='test_pool')
    ti1 = TI(task=op1, execution_date=DEFAULT_DATE)
    ti2 = TI(task=op2, execution_date=DEFAULT_DATE)
    ti1.state = State.RUNNING
    ti2.state = State.QUEUED

    session = settings.Session
    session.add(pool)
    session.add(ti1)
    session.add(ti2)
    session.commit()
    session.close()

    self.assertEqual(float('inf'), pool.open_slots())  # pylint: disable=no-value-for-parameter
    self.assertEqual(1, pool.running_slots())  # pylint: disable=no-value-for-parameter
    self.assertEqual(1, pool.queued_slots())  # pylint: disable=no-value-for-parameter
    self.assertEqual(2, pool.occupied_slots())  # pylint: disable=no-value-for-parameter
Example #2
Source File: test_dagrun.py From airflow with Apache License 2.0 | 6 votes |
def test_clear_task_instances_for_backfill_dagrun(self):
    now = timezone.utcnow()
    session = settings.Session()
    dag_id = 'test_clear_task_instances_for_backfill_dagrun'
    dag = DAG(dag_id=dag_id, start_date=now)
    self.create_dag_run(dag, execution_date=now, is_backfill=True)

    task0 = DummyOperator(task_id='backfill_task_0', owner='test', dag=dag)
    ti0 = TI(task=task0, execution_date=now)
    ti0.run()

    qry = session.query(TI).filter(
        TI.dag_id == dag.dag_id).all()
    clear_task_instances(qry, session)
    session.commit()
    ti0.refresh_from_db()
    dr0 = session.query(DagRun).filter(
        DagRun.dag_id == dag_id,
        DagRun.execution_date == now
    ).first()
    self.assertEqual(dr0.state, State.RUNNING)
Example #3
Source File: test_pool.py From airflow with Apache License 2.0 | 6 votes |
def test_default_pool_open_slots(self):
    set_default_pool_slots(5)
    self.assertEqual(5, Pool.get_default_pool().open_slots())

    dag = DAG(
        dag_id='test_default_pool_open_slots',
        start_date=DEFAULT_DATE,
    )
    op1 = DummyOperator(task_id='dummy1', dag=dag)
    op2 = DummyOperator(task_id='dummy2', dag=dag, pool_slots=2)
    ti1 = TI(task=op1, execution_date=DEFAULT_DATE)
    ti2 = TI(task=op2, execution_date=DEFAULT_DATE)
    ti1.state = State.RUNNING
    ti2.state = State.QUEUED

    session = settings.Session
    session.add(ti1)
    session.add(ti2)
    session.commit()
    session.close()

    self.assertEqual(2, Pool.get_default_pool().open_slots())
Example #4
Source File: test_branch_operator.py From airflow with Apache License 2.0 | 6 votes |
def test_with_skip_in_branch_downstream_dependencies(self):
    self.branch_op = ChooseBranchOne(task_id="make_choice", dag=self.dag)
    self.branch_op >> self.branch_1 >> self.branch_2
    self.branch_op >> self.branch_2
    self.dag.clear()

    dagrun = self.dag.create_dagrun(
        run_type=DagRunType.MANUAL,
        start_date=timezone.utcnow(),
        execution_date=DEFAULT_DATE,
        state=State.RUNNING
    )

    self.branch_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)

    tis = dagrun.get_task_instances()
    for ti in tis:
        if ti.task_id == 'make_choice':
            self.assertEqual(ti.state, State.SUCCESS)
        elif ti.task_id == 'branch_1':
            self.assertEqual(ti.state, State.NONE)
        elif ti.task_id == 'branch_2':
            self.assertEqual(ti.state, State.NONE)
        else:
            raise Exception
Example #5
Source File: test_dagrun.py From airflow with Apache License 2.0 | 6 votes |
def test_dagrun_no_deadlock_with_shutdown(self):
    session = settings.Session()
    dag = DAG('test_dagrun_no_deadlock_with_shutdown',
              start_date=DEFAULT_DATE)
    with dag:
        op1 = DummyOperator(task_id='upstream_task')
        op2 = DummyOperator(task_id='downstream_task')
        op2.set_upstream(op1)

    dr = dag.create_dagrun(run_id='test_dagrun_no_deadlock_with_shutdown',
                           state=State.RUNNING,
                           execution_date=DEFAULT_DATE,
                           start_date=DEFAULT_DATE)
    upstream_ti = dr.get_task_instance(task_id='upstream_task')
    upstream_ti.set_state(State.SHUTDOWN, session=session)

    dr.update_state()
    self.assertEqual(dr.state, State.RUNNING)
Example #6
Source File: test_python.py From airflow with Apache License 2.0 | 6 votes |
def test_conflicting_kwargs(self):
    self.dag.create_dagrun(
        run_type=DagRunType.MANUAL,
        execution_date=DEFAULT_DATE,
        start_date=DEFAULT_DATE,
        state=State.RUNNING,
        external_trigger=False,
    )

    # dag is not allowed since it is a reserved keyword
    def func(dag):
        # A ValueError should be triggered since we're using dag as a
        # reserved keyword
        raise RuntimeError("Should not be triggered, dag: {}".format(dag))

    python_operator = PythonOperator(
        task_id='python_operator',
        op_args=[1],
        python_callable=func,
        dag=self.dag
    )

    with self.assertRaises(ValueError) as context:
        python_operator.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
    self.assertTrue('dag' in str(context.exception), "'dag' not found in the exception")
Example #7
Source File: test_python.py From airflow with Apache License 2.0 | 6 votes |
def test_context_with_conflicting_op_args(self):
    self.dag.create_dagrun(
        run_type=DagRunType.MANUAL,
        execution_date=DEFAULT_DATE,
        start_date=DEFAULT_DATE,
        state=State.RUNNING,
        external_trigger=False,
    )

    def func(custom, dag):
        self.assertEqual(1, custom, "custom should be 1")
        self.assertIsNotNone(dag, "dag should be set")

    python_operator = PythonOperator(
        task_id='python_operator',
        op_kwargs={'custom': 1},
        python_callable=func,
        dag=self.dag
    )

    python_operator.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
Example #8
Source File: test_dot_renderer.py From airflow with Apache License 2.0 | 6 votes |
def test_should_render_dag_with_task_instances(self):
    dag = DAG(dag_id="DAG_ID")
    task_1 = BashOperator(dag=dag, start_date=START_DATE, task_id="first", bash_command="echo 1")
    task_2 = BashOperator(dag=dag, start_date=START_DATE, task_id="second", bash_command="echo 1")
    task_3 = PythonOperator(
        dag=dag, start_date=START_DATE, task_id="third", python_callable=mock.MagicMock()
    )
    task_1 >> task_2
    task_1 >> task_3
    tis = [
        TaskInstance(task_1, execution_date=START_DATE, state=State.SCHEDULED),
        TaskInstance(task_2, execution_date=START_DATE, state=State.SUCCESS),
        TaskInstance(task_3, execution_date=START_DATE, state=State.RUNNING),
    ]
    dot = dot_renderer.render_dag(dag, tis=tis)
    source = dot.source
    # Should render DAG title
    self.assertIn("label=DAG_ID", source)
    self.assertIn('first [color=black fillcolor=tan shape=rectangle style="filled,rounded"]', source)
    self.assertIn('second [color=white fillcolor=green shape=rectangle style="filled,rounded"]', source)
    self.assertIn('third [color=black fillcolor=lime shape=rectangle style="filled,rounded"]', source)
Example #9
Source File: test_python.py From airflow with Apache License 2.0 | 6 votes |
def test_fail_multiple_outputs_key_type(self):
    @task_decorator(multiple_outputs=True)
    def add_number(num: int):
        return {2: num}

    with self.dag:
        ret = add_number(2)

    self.dag.create_dagrun(
        run_id=DagRunType.MANUAL.value,
        execution_date=DEFAULT_DATE,
        start_date=DEFAULT_DATE,
        state=State.RUNNING
    )
    with pytest.raises(AirflowException):  # pylint: disable=maybe-no-member
        ret.operator.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
Example #10
Source File: test_python.py From airflow with Apache License 2.0 | 6 votes |
def test_fail_multiple_outputs_no_dict(self):
    @task_decorator(multiple_outputs=True)
    def add_number(num: int):
        return num

    with self.dag:
        ret = add_number(2)

    self.dag.create_dagrun(
        run_id=DagRunType.MANUAL.value,
        execution_date=DEFAULT_DATE,
        start_date=DEFAULT_DATE,
        state=State.RUNNING
    )
    with pytest.raises(AirflowException):  # pylint: disable=maybe-no-member
        ret.operator.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
Example #11
Source File: test_mark_tasks.py From airflow with Apache License 2.0 | 6 votes |
def test_set_state_without_commit(self):
    date = self.execution_dates[0]
    dr = self._create_test_dag_run(State.RUNNING, date)
    self._set_default_task_instance_states(dr)

    will_be_altered = set_dag_run_state_to_running(self.dag1, date, commit=False)

    # None of the tasks will be altered.
    self.assertEqual(len(will_be_altered), 0)
    self._verify_dag_run_state(self.dag1, date, State.RUNNING)
    self._verify_task_instance_states_remain_default(dr)

    will_be_altered = set_dag_run_state_to_failed(self.dag1, date, commit=False)

    # Only the running task will be altered.
    self.assertEqual(len(will_be_altered), 1)
    self._verify_dag_run_state(self.dag1, date, State.RUNNING)
    self._verify_task_instance_states_remain_default(dr)

    will_be_altered = set_dag_run_state_to_success(self.dag1, date, commit=False)

    # All except the SUCCESS task should be altered.
    self.assertEqual(len(will_be_altered), 5)
    self._verify_dag_run_state(self.dag1, date, State.RUNNING)
    self._verify_task_instance_states_remain_default(dr)
Example #12
Source File: test_taskinstance.py From airflow with Apache License 2.0 | 6 votes |
def test_success_callback_no_race_condition(self):
    callback_wrapper = CallbackWrapper()
    dag = DAG('test_success_callback_no_race_condition', start_date=DEFAULT_DATE,
              end_date=DEFAULT_DATE + datetime.timedelta(days=10))
    task = DummyOperator(task_id='op', email='test@test.test',
                         on_success_callback=callback_wrapper.success_handler, dag=dag)
    ti = TI(task=task, execution_date=datetime.datetime.now())
    ti.state = State.RUNNING
    session = settings.Session()
    session.merge(ti)
    session.commit()
    callback_wrapper.wrap_task_instance(ti)
    ti._run_raw_task()
    self.assertTrue(callback_wrapper.callback_ran)
    self.assertEqual(callback_wrapper.task_state_in_callback, State.RUNNING)
    ti.refresh_from_db()
    self.assertEqual(ti.state, State.SUCCESS)
Example #13
Source File: test_taskinstance.py From airflow with Apache License 2.0 | 6 votes |
def test_execute_callback(self):
    called = False

    def on_execute_callable(context):
        nonlocal called
        called = True
        self.assertEqual(
            context['dag_run'].dag_id,
            'test_dagrun_execute_callback'
        )

    dag = DAG('test_execute_callback', start_date=DEFAULT_DATE,
              end_date=DEFAULT_DATE + datetime.timedelta(days=10))
    task = DummyOperator(task_id='op', email='test@test.test',
                         on_execute_callback=on_execute_callable, dag=dag)
    ti = TI(task=task, execution_date=datetime.datetime.now())
    ti.state = State.RUNNING
    session = settings.Session()
    session.merge(ti)
    session.commit()
    ti._run_raw_task()
    assert called
    ti.refresh_from_db()
    assert ti.state == State.SUCCESS
Example #14
Source File: test_mark_tasks.py From airflow with Apache License 2.0 | 6 votes |
def _verify_dag_run_dates(self, dag, date, state, middle_time, session=None):
    # When target state is RUNNING, we should set start_date,
    # otherwise we should set end_date.
    DR = DagRun
    dr = session.query(DR).filter(
        DR.dag_id == dag.dag_id,
        DR.execution_date == date
    ).one()
    if state == State.RUNNING:
        # Since the DAG is running, the start_date must be updated after creation
        self.assertGreater(dr.start_date, middle_time)
        # If the dag is still running, we don't have an end date
        self.assertIsNone(dr.end_date)
    else:
        # If the dag is not running, there must be an end time
        self.assertLess(dr.start_date, middle_time)
        self.assertGreater(dr.end_date, middle_time)
Example #15
Source File: test_taskinstance.py From airflow with Apache License 2.0 | 6 votes |
def test_echo_env_variables(self):
    dag = DAG('test_echo_env_variables', start_date=DEFAULT_DATE,
              end_date=DEFAULT_DATE + datetime.timedelta(days=10))
    op = PythonOperator(task_id='hive_in_python_op',
                        dag=dag,
                        python_callable=self._env_var_check_callback)
    dag.create_dagrun(
        run_type=DagRunType.MANUAL,
        execution_date=DEFAULT_DATE,
        start_date=DEFAULT_DATE,
        state=State.RUNNING,
        external_trigger=False)
    ti = TI(task=op, execution_date=DEFAULT_DATE)
    ti.state = State.RUNNING
    session = settings.Session()
    session.merge(ti)
    session.commit()
    ti._run_raw_task()
    ti.refresh_from_db()
    self.assertEqual(ti.state, State.SUCCESS)
Example #16
Source File: test_taskinstance.py From airflow with Apache License 2.0 | 6 votes |
def test_task_stats(self, stats_mock):
    dag = DAG('test_task_start_end_stats', start_date=DEFAULT_DATE,
              end_date=DEFAULT_DATE + datetime.timedelta(days=10))
    op = DummyOperator(task_id='dummy_op', dag=dag)
    dag.create_dagrun(
        run_id='manual__' + DEFAULT_DATE.isoformat(),
        execution_date=DEFAULT_DATE,
        start_date=DEFAULT_DATE,
        state=State.RUNNING,
        external_trigger=False)
    ti = TI(task=op, execution_date=DEFAULT_DATE)
    ti.state = State.RUNNING
    session = settings.Session()
    session.merge(ti)
    session.commit()
    ti._run_raw_task()
    ti.refresh_from_db()
    stats_mock.assert_called_with('ti.finish.{}.{}.{}'.format(dag.dag_id, op.task_id, ti.state))
    self.assertIn(call('ti.start.{}.{}'.format(dag.dag_id, op.task_id)), stats_mock.mock_calls)
    self.assertEqual(stats_mock.call_count, 5)
Example #17
Source File: test_mark_tasks.py From airflow with Apache License 2.0 | 6 votes |
def setUp(self):
    clear_db_runs()
    drs = _create_dagruns(self.dag1, self.execution_dates,
                          state=State.RUNNING,
                          run_type=DagRunType.SCHEDULED)
    for dr in drs:
        dr.dag = self.dag1
        dr.verify_integrity()

    drs = _create_dagruns(self.dag2,
                          [self.dag2.default_args['start_date']],
                          state=State.RUNNING,
                          run_type=DagRunType.SCHEDULED)
    for dr in drs:
        dr.dag = self.dag2
        dr.verify_integrity()

    drs = _create_dagruns(self.dag3,
                          self.dag3_execution_dates,
                          state=State.SUCCESS,
                          run_type=DagRunType.MANUAL)
    for dr in drs:
        dr.dag = self.dag3
        dr.verify_integrity()
Example #18
Source File: test_python.py From airflow with Apache License 2.0 | 6 votes |
def test_xcom_push(self):
    branch_op = BranchPythonOperator(task_id='make_choice',
                                     dag=self.dag,
                                     python_callable=lambda: 'branch_1')

    self.branch_1.set_upstream(branch_op)
    self.branch_2.set_upstream(branch_op)
    self.dag.clear()

    dr = self.dag.create_dagrun(
        run_type=DagRunType.MANUAL,
        start_date=timezone.utcnow(),
        execution_date=DEFAULT_DATE,
        state=State.RUNNING
    )

    branch_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)

    tis = dr.get_task_instances()
    for ti in tis:
        if ti.task_id == 'make_choice':
            self.assertEqual(
                ti.xcom_pull(task_ids='make_choice'), 'branch_1')
Example #19
Source File: test_views.py From airflow with Apache License 2.0 | 6 votes |
def prepare_dagruns(self):
    dagbag = models.DagBag(include_examples=True)
    self.bash_dag = dagbag.dags['example_bash_operator']
    self.sub_dag = dagbag.dags['example_subdag_operator']

    self.bash_dagrun = self.bash_dag.create_dagrun(
        run_type=DagRunType.SCHEDULED,
        execution_date=self.default_date,
        start_date=timezone.utcnow(),
        state=State.RUNNING)

    self.sub_dagrun = self.sub_dag.create_dagrun(
        run_type=DagRunType.SCHEDULED,
        execution_date=self.default_date,
        start_date=timezone.utcnow(),
        state=State.RUNNING)
Example #20
Source File: test_views.py From airflow with Apache License 2.0 | 6 votes |
def prepare_dagruns(self):
    self.bash_dag = self.dagbag.dags['example_bash_operator']
    self.sub_dag = self.dagbag.dags['example_subdag_operator']
    self.xcom_dag = self.dagbag.dags['example_xcom']

    self.bash_dagrun = self.bash_dag.create_dagrun(
        run_type=DagRunType.SCHEDULED,
        execution_date=self.EXAMPLE_DAG_DEFAULT_DATE,
        start_date=timezone.utcnow(),
        state=State.RUNNING)

    self.sub_dagrun = self.sub_dag.create_dagrun(
        run_type=DagRunType.SCHEDULED,
        execution_date=self.EXAMPLE_DAG_DEFAULT_DATE,
        start_date=timezone.utcnow(),
        state=State.RUNNING)

    self.xcom_dagrun = self.xcom_dag.create_dagrun(
        run_type=DagRunType.SCHEDULED,
        execution_date=self.EXAMPLE_DAG_DEFAULT_DATE,
        start_date=timezone.utcnow(),
        state=State.RUNNING)
Example #21
Source File: test_python.py From airflow with Apache License 2.0 | 5 votes |
def test_with_skip_in_branch_downstream_dependencies(self):
    branch_op = BranchPythonOperator(task_id='make_choice',
                                     dag=self.dag,
                                     python_callable=lambda: 'branch_1')
    branch_op >> self.branch_1 >> self.branch_2
    branch_op >> self.branch_2
    self.dag.clear()

    dr = self.dag.create_dagrun(
        run_type=DagRunType.MANUAL,
        start_date=timezone.utcnow(),
        execution_date=DEFAULT_DATE,
        state=State.RUNNING
    )

    branch_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)

    tis = dr.get_task_instances()
    for ti in tis:
        if ti.task_id == 'make_choice':
            self.assertEqual(ti.state, State.SUCCESS)
        elif ti.task_id == 'branch_1':
            self.assertEqual(ti.state, State.NONE)
        elif ti.task_id == 'branch_2':
            self.assertEqual(ti.state, State.NONE)
        else:
            raise ValueError(f'Invalid task id {ti.task_id} found!')
Example #22
Source File: test_python.py From airflow with Apache License 2.0 | 5 votes |
def test_with_dag_run(self):
    branch_op = BranchPythonOperator(task_id='make_choice',
                                     dag=self.dag,
                                     python_callable=lambda: 'branch_1')
    self.branch_1.set_upstream(branch_op)
    self.branch_2.set_upstream(branch_op)
    self.dag.clear()

    dr = self.dag.create_dagrun(
        run_type=DagRunType.MANUAL,
        start_date=timezone.utcnow(),
        execution_date=DEFAULT_DATE,
        state=State.RUNNING
    )

    branch_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)

    tis = dr.get_task_instances()
    for ti in tis:
        if ti.task_id == 'make_choice':
            self.assertEqual(ti.state, State.SUCCESS)
        elif ti.task_id == 'branch_1':
            self.assertEqual(ti.state, State.NONE)
        elif ti.task_id == 'branch_2':
            self.assertEqual(ti.state, State.SKIPPED)
        else:
            raise ValueError(f'Invalid task id {ti.task_id} found!')
Example #23
Source File: test_dagrun.py From airflow with Apache License 2.0 | 5 votes |
def test_get_task_instance_on_empty_dagrun(self):
    """
    Make sure that a proper value is returned when a dagrun has no task instances
    """
    dag = DAG(
        dag_id='test_get_task_instance_on_empty_dagrun',
        start_date=timezone.datetime(2017, 1, 1)
    )
    ShortCircuitOperator(
        task_id='test_short_circuit_false',
        dag=dag,
        python_callable=lambda: False)

    session = settings.Session()

    now = timezone.utcnow()

    # Don't use create_dagrun since it will create the task instances too which we
    # don't want
    dag_run = models.DagRun(
        dag_id=dag.dag_id,
        run_type=DagRunType.MANUAL.value,
        execution_date=now,
        start_date=now,
        state=State.RUNNING,
        external_trigger=False,
    )
    session.add(dag_run)
    session.commit()

    ti = dag_run.get_task_instance('test_short_circuit_false')
    self.assertEqual(None, ti)
Example #24
Source File: test_python.py From airflow with Apache License 2.0 | 5 votes |
def test_python_callable_arguments_are_templatized(self):
    """Test PythonOperator op_args are templatized"""
    recorded_calls = []

    # Create a named tuple and ensure it is still preserved
    # after the rendering is done
    Named = namedtuple('Named', ['var1', 'var2'])
    named_tuple = Named('{{ ds }}', 'unchanged')

    task = PythonOperator(
        task_id='python_operator',
        # a Mock instance cannot be used as a callable function or test fails with a
        # TypeError: Object of type Mock is not JSON serializable
        python_callable=build_recording_function(recorded_calls),
        op_args=[
            4,
            date(2019, 1, 1),
            "dag {{dag.dag_id}} ran on {{ds}}.",
            named_tuple
        ],
        dag=self.dag)

    self.dag.create_dagrun(
        run_type=DagRunType.MANUAL,
        execution_date=DEFAULT_DATE,
        start_date=DEFAULT_DATE,
        state=State.RUNNING
    )
    task.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)

    ds_templated = DEFAULT_DATE.date().isoformat()
    self.assertEqual(1, len(recorded_calls))
    self._assert_calls_equal(
        recorded_calls[0],
        Call(4,
             date(2019, 1, 1),
             "dag {} ran on {}.".format(self.dag.dag_id, ds_templated),
             Named(ds_templated, 'unchanged'))
    )
Example #25
Source File: test_dagrun.py From airflow with Apache License 2.0 | 5 votes |
def test_dagrun_find(self):
    session = settings.Session()
    now = timezone.utcnow()

    dag_id1 = "test_dagrun_find_externally_triggered"
    dag_run = models.DagRun(
        dag_id=dag_id1,
        run_type=DagRunType.MANUAL.value,
        execution_date=now,
        start_date=now,
        state=State.RUNNING,
        external_trigger=True,
    )
    session.add(dag_run)

    dag_id2 = "test_dagrun_find_not_externally_triggered"
    dag_run = models.DagRun(
        dag_id=dag_id2,
        run_type=DagRunType.MANUAL.value,
        execution_date=now,
        start_date=now,
        state=State.RUNNING,
        external_trigger=False,
    )
    session.add(dag_run)

    session.commit()

    self.assertEqual(1, len(models.DagRun.find(dag_id=dag_id1, external_trigger=True)))
    self.assertEqual(0, len(models.DagRun.find(dag_id=dag_id1, external_trigger=False)))
    self.assertEqual(0, len(models.DagRun.find(dag_id=dag_id2, external_trigger=True)))
    self.assertEqual(1, len(models.DagRun.find(dag_id=dag_id2, external_trigger=False)))
Example #26
Source File: test_dagrun.py From airflow with Apache License 2.0 | 5 votes |
def test_dagrun_failure_callback(self):
    def on_failure_callable(context):
        self.assertEqual(
            context['dag_run'].dag_id,
            'test_dagrun_failure_callback'
        )

    dag = DAG(
        dag_id='test_dagrun_failure_callback',
        start_date=datetime.datetime(2017, 1, 1),
        on_failure_callback=on_failure_callable,
    )
    dag_task1 = DummyOperator(
        task_id='test_state_succeeded1',
        dag=dag)
    dag_task2 = DummyOperator(
        task_id='test_state_failed2',
        dag=dag)

    initial_task_states = {
        'test_state_succeeded1': State.SUCCESS,
        'test_state_failed2': State.FAILED,
    }
    dag_task1.set_downstream(dag_task2)

    dag_run = self.create_dag_run(dag=dag,
                                  state=State.RUNNING,
                                  task_states=initial_task_states)
    dag_run.update_state()
    self.assertEqual(State.FAILED, dag_run.state)
Example #27
Source File: test_dagrun.py From airflow with Apache License 2.0 | 5 votes |
def test_dagrun_success_callback(self):
    def on_success_callable(context):
        self.assertEqual(
            context['dag_run'].dag_id,
            'test_dagrun_success_callback'
        )

    dag = DAG(
        dag_id='test_dagrun_success_callback',
        start_date=datetime.datetime(2017, 1, 1),
        on_success_callback=on_success_callable,
    )
    dag_task1 = DummyOperator(
        task_id='test_state_succeeded1',
        dag=dag)
    dag_task2 = DummyOperator(
        task_id='test_state_succeeded2',
        dag=dag)
    dag_task1.set_downstream(dag_task2)

    initial_task_states = {
        'test_state_succeeded1': State.SUCCESS,
        'test_state_succeeded2': State.SUCCESS,
    }

    dag_run = self.create_dag_run(dag=dag,
                                  state=State.RUNNING,
                                  task_states=initial_task_states)
    dag_run.update_state()
    self.assertEqual(State.SUCCESS, dag_run.state)
Example #28
Source File: test_dagrun.py From airflow with Apache License 2.0 | 5 votes |
def test_dagrun_no_deadlock_with_depends_on_past(self):
    session = settings.Session()
    dag = DAG('test_dagrun_no_deadlock',
              start_date=DEFAULT_DATE)
    with dag:
        DummyOperator(task_id='dop', depends_on_past=True)
        DummyOperator(task_id='tc', task_concurrency=1)

    dag.clear()
    dr = dag.create_dagrun(run_id='test_dagrun_no_deadlock_1',
                           state=State.RUNNING,
                           execution_date=DEFAULT_DATE,
                           start_date=DEFAULT_DATE)
    dr2 = dag.create_dagrun(run_id='test_dagrun_no_deadlock_2',
                            state=State.RUNNING,
                            execution_date=DEFAULT_DATE + datetime.timedelta(days=1),
                            start_date=DEFAULT_DATE + datetime.timedelta(days=1))
    ti1_op1 = dr.get_task_instance(task_id='dop')
    dr2.get_task_instance(task_id='dop')
    ti2_op1 = dr.get_task_instance(task_id='tc')
    dr.get_task_instance(task_id='tc')
    ti1_op1.set_state(state=State.RUNNING, session=session)
    dr.update_state()
    dr2.update_state()
    self.assertEqual(dr.state, State.RUNNING)
    self.assertEqual(dr2.state, State.RUNNING)

    ti2_op1.set_state(state=State.RUNNING, session=session)
    dr.update_state()
    dr2.update_state()
    self.assertEqual(dr.state, State.RUNNING)
    self.assertEqual(dr2.state, State.RUNNING)
Example #29
Source File: test_dagrun.py From airflow with Apache License 2.0 | 5 votes |
def test_dagrun_deadlock(self):
    session = settings.Session()
    dag = DAG(
        'text_dagrun_deadlock',
        start_date=DEFAULT_DATE,
        default_args={'owner': 'owner1'})

    with dag:
        op1 = DummyOperator(task_id='A')
        op2 = DummyOperator(task_id='B')
        op2.trigger_rule = TriggerRule.ONE_FAILED
        op2.set_upstream(op1)

    dag.clear()
    now = timezone.utcnow()
    dr = dag.create_dagrun(run_id='test_dagrun_deadlock',
                           state=State.RUNNING,
                           execution_date=now,
                           start_date=now)

    ti_op1 = dr.get_task_instance(task_id=op1.task_id)
    ti_op1.set_state(state=State.SUCCESS, session=session)
    ti_op2 = dr.get_task_instance(task_id=op2.task_id)
    ti_op2.set_state(state=State.NONE, session=session)

    dr.update_state()
    self.assertEqual(dr.state, State.RUNNING)

    ti_op2.set_state(state=State.NONE, session=session)
    op2.trigger_rule = 'invalid'
    dr.update_state()
    self.assertEqual(dr.state, State.FAILED)
Example #30
Source File: test_dagrun.py From airflow with Apache License 2.0 | 5 votes |
def test_dagrun_success_when_all_skipped(self):
    """
    Tests that a DAG run succeeds when all tasks are skipped
    """
    dag = DAG(
        dag_id='test_dagrun_success_when_all_skipped',
        start_date=timezone.datetime(2017, 1, 1)
    )
    dag_task1 = ShortCircuitOperator(
        task_id='test_short_circuit_false',
        dag=dag,
        python_callable=lambda: False)
    dag_task2 = DummyOperator(
        task_id='test_state_skipped1',
        dag=dag)
    dag_task3 = DummyOperator(
        task_id='test_state_skipped2',
        dag=dag)
    dag_task1.set_downstream(dag_task2)
    dag_task2.set_downstream(dag_task3)

    initial_task_states = {
        'test_short_circuit_false': State.SUCCESS,
        'test_state_skipped1': State.SKIPPED,
        'test_state_skipped2': State.SKIPPED,
    }

    dag_run = self.create_dag_run(dag=dag,
                                  state=State.RUNNING,
                                  task_states=initial_task_states)
    dag_run.update_state()
    self.assertEqual(State.SUCCESS, dag_run.state)