Python airflow.operators.dummy_operator.DummyOperator() Examples
The following are 30 code examples of airflow.operators.dummy_operator.DummyOperator(). All of them are drawn from the Apache Airflow project; the source file and license are noted above each example. You may also want to check out all other available functions and classes of the airflow.operators.dummy_operator module.
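As a quick orientation before the test-suite examples, here is a minimal sketch of typical DummyOperator usage. DummyOperator executes no work itself; it serves as a placeholder, start/end marker, or join point between task groups. The dag_id, task ids, and dates below are illustrative only and do not come from the examples that follow.

from datetime import datetime

from airflow import DAG
from airflow.operators.dummy_operator import DummyOperator

# A placeholder task: DummyOperator runs no user code, so it is handy
# as a start/end marker or as a join point between task groups.
with DAG(dag_id='dummy_operator_example',
         start_date=datetime(2020, 1, 1),
         schedule_interval=None) as dag:
    start = DummyOperator(task_id='start')
    end = DummyOperator(task_id='end')
    start >> end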
Example #1
Source File: test_pool.py From airflow with Apache License 2.0 | 7 votes

def test_infinite_slots(self):
    pool = Pool(pool='test_pool', slots=-1)
    dag = DAG(
        dag_id='test_infinite_slots',
        start_date=DEFAULT_DATE, )
    op1 = DummyOperator(task_id='dummy1', dag=dag, pool='test_pool')
    op2 = DummyOperator(task_id='dummy2', dag=dag, pool='test_pool')
    ti1 = TI(task=op1, execution_date=DEFAULT_DATE)
    ti2 = TI(task=op2, execution_date=DEFAULT_DATE)
    ti1.state = State.RUNNING
    ti2.state = State.QUEUED

    session = settings.Session
    session.add(pool)
    session.add(ti1)
    session.add(ti2)
    session.commit()
    session.close()

    self.assertEqual(float('inf'), pool.open_slots())  # pylint: disable=no-value-for-parameter
    self.assertEqual(1, pool.running_slots())  # pylint: disable=no-value-for-parameter
    self.assertEqual(1, pool.queued_slots())  # pylint: disable=no-value-for-parameter
    self.assertEqual(2, pool.occupied_slots())  # pylint: disable=no-value-for-parameter
Example #2
Source File: test_helpers.py From airflow with Apache License 2.0 | 7 votes

def test_render_log_filename(self):
    try_number = 1
    dag_id = 'test_render_log_filename_dag'
    task_id = 'test_render_log_filename_task'
    execution_date = datetime(2016, 1, 1)

    dag = DAG(dag_id, start_date=execution_date)
    task = DummyOperator(task_id=task_id, dag=dag)
    ti = TaskInstance(task=task, execution_date=execution_date)

    filename_template = "{{ ti.dag_id }}/{{ ti.task_id }}/{{ ts }}/{{ try_number }}.log"

    ts = ti.get_template_context()['ts']
    expected_filename = "{dag_id}/{task_id}/{ts}/{try_number}.log".format(
        dag_id=dag_id, task_id=task_id, ts=ts, try_number=try_number)

    rendered_filename = helpers.render_log_filename(ti, try_number, filename_template)

    self.assertEqual(rendered_filename, expected_filename)
Example #3
Source File: test_dagrun.py From airflow with Apache License 2.0 | 6 votes

def test_task_instance_mutation_hook(self, state, mock_hook):
    def mutate_task_instance(task_instance):
        if task_instance.queue == 'queue1':
            task_instance.queue = 'queue2'
        else:
            task_instance.queue = 'queue1'

    mock_hook.side_effect = mutate_task_instance

    dag = DAG('test_task_instance_mutation_hook', start_date=DEFAULT_DATE)
    dag.add_task(DummyOperator(task_id='task_to_mutate', owner='test', queue='queue1'))

    dagrun = self.create_dag_run(dag)
    task = dagrun.get_task_instances()[0]
    session = settings.Session()
    task.state = state
    session.merge(task)
    session.commit()

    assert task.queue == 'queue2'

    dagrun.verify_integrity()
    task = dagrun.get_task_instances()[0]
    assert task.queue == 'queue1'
Example #4
Source File: test_dagrun.py From airflow with Apache License 2.0 | 6 votes

def test_dagrun_no_deadlock_with_shutdown(self):
    session = settings.Session()
    dag = DAG('test_dagrun_no_deadlock_with_shutdown',
              start_date=DEFAULT_DATE)
    with dag:
        op1 = DummyOperator(task_id='upstream_task')
        op2 = DummyOperator(task_id='downstream_task')
        op2.set_upstream(op1)

    dr = dag.create_dagrun(run_id='test_dagrun_no_deadlock_with_shutdown',
                           state=State.RUNNING,
                           execution_date=DEFAULT_DATE,
                           start_date=DEFAULT_DATE)
    upstream_ti = dr.get_task_instance(task_id='upstream_task')
    upstream_ti.set_state(State.SHUTDOWN, session=session)

    dr.update_state()
    self.assertEqual(dr.state, State.RUNNING)
Example #5
Source File: test_dagrun.py From airflow with Apache License 2.0 | 6 votes

def test_clear_task_instances_for_backfill_dagrun(self):
    now = timezone.utcnow()
    session = settings.Session()
    dag_id = 'test_clear_task_instances_for_backfill_dagrun'
    dag = DAG(dag_id=dag_id, start_date=now)
    self.create_dag_run(dag, execution_date=now, is_backfill=True)

    task0 = DummyOperator(task_id='backfill_task_0', owner='test', dag=dag)
    ti0 = TI(task=task0, execution_date=now)
    ti0.run()

    qry = session.query(TI).filter(
        TI.dag_id == dag.dag_id).all()
    clear_task_instances(qry, session)
    session.commit()
    ti0.refresh_from_db()
    dr0 = session.query(DagRun).filter(
        DagRun.dag_id == dag_id,
        DagRun.execution_date == now
    ).first()
    self.assertEqual(dr0.state, State.RUNNING)
Example #6
Source File: test_subdag.py From airflow with Apache License 2.0 | 6 votes

def subdag(parent_dag_name, child_dag_name, args):
    """
    Create a subdag.
    """
    dag_subdag = DAG(
        dag_id='%s.%s' % (parent_dag_name, child_dag_name),
        default_args=args,
        schedule_interval="@daily",
    )

    for i in range(2):
        DummyOperator(
            task_id='%s-task-%s' % (child_dag_name, i + 1),
            default_args=args,
            dag=dag_subdag,
        )

    return dag_subdag
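The factory above only builds the child DAG. A minimal sketch of how such a factory might be wired into a parent DAG with SubDagOperator follows; the parent dag_id, default_args, and schedule here are assumptions, not part of the example above. Note that Airflow requires the subdag's dag_id to be '<parent_dag_id>.<task_id>', which is why the SubDagOperator's task_id must match the child_dag_name passed to the factory.

from datetime import datetime

from airflow import DAG
from airflow.operators.subdag_operator import SubDagOperator

# Hypothetical parent DAG that embeds the subdag built by the factory above.
default_args = {'owner': 'airflow', 'start_date': datetime(2020, 1, 1)}
with DAG('parent_dag', default_args=default_args,
         schedule_interval='@daily') as parent:
    section = SubDagOperator(
        task_id='child_dag',  # must match the child_dag_name passed below
        subdag=subdag('parent_dag', 'child_dag', default_args),
    )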
Example #7
Source File: test_dagrun.py From airflow with Apache License 2.0 | 6 votes

def test_removed_task_instances_can_be_restored(self):
    def with_all_tasks_removed(dag):
        return DAG(dag_id=dag.dag_id, start_date=dag.start_date)

    dag = DAG('test_task_restoration', start_date=DEFAULT_DATE)
    dag.add_task(DummyOperator(task_id='flaky_task', owner='test'))

    dagrun = self.create_dag_run(dag)
    flaky_ti = dagrun.get_task_instances()[0]
    self.assertEqual('flaky_task', flaky_ti.task_id)
    self.assertEqual(State.NONE, flaky_ti.state)

    dagrun.dag = with_all_tasks_removed(dag)
    dagrun.verify_integrity()
    flaky_ti.refresh_from_db()
    self.assertEqual(State.NONE, flaky_ti.state)

    dagrun.dag.add_task(DummyOperator(task_id='flaky_task', owner='test'))
    dagrun.verify_integrity()
    flaky_ti.refresh_from_db()
    self.assertEqual(State.NONE, flaky_ti.state)
Example #8
Source File: test_not_previously_skipped_dep.py From airflow with Apache License 2.0 | 6 votes

def test_no_skipmixin_parent():
    """
    A simple DAG with no branching. Both op1 and op2 are DummyOperator.
    NotPreviouslySkippedDep is met.
    """
    start_date = pendulum.datetime(2020, 1, 1)
    dag = DAG(
        "test_no_skipmixin_parent_dag", schedule_interval=None, start_date=start_date
    )
    op1 = DummyOperator(task_id="op1", dag=dag)
    op2 = DummyOperator(task_id="op2", dag=dag)
    op1 >> op2

    ti2 = TaskInstance(op2, start_date)

    with create_session() as session:
        dep = NotPreviouslySkippedDep()
        assert len(list(dep.get_dep_statuses(ti2, session, DepContext()))) == 0
        assert dep.is_met(ti2, session)
        assert ti2.state != State.SKIPPED
Example #9
Source File: dag_cycle_tester.py From airflow with Apache License 2.0 | 6 votes

def test_cycle_large_loop(self):
    # large loop
    dag = DAG(
        'dag',
        start_date=DEFAULT_DATE,
        default_args={'owner': 'owner1'})

    # A -> B -> C -> D -> E -> A
    with dag:
        op1 = DummyOperator(task_id='A')
        op2 = DummyOperator(task_id='B')
        op3 = DummyOperator(task_id='C')
        op4 = DummyOperator(task_id='D')
        op5 = DummyOperator(task_id='E')
        op1.set_downstream(op2)
        op2.set_downstream(op3)
        op3.set_downstream(op4)
        op4.set_downstream(op5)
        op5.set_downstream(op1)

    with self.assertRaises(AirflowDagCycleException):
        self.assertFalse(test_cycle(dag))
Example #10
Source File: dag_cycle_tester.py From airflow with Apache License 2.0 | 6 votes

def test_cycle_arbitrary_loop(self):
    # test arbitrary loop
    dag = DAG(
        'dag',
        start_date=DEFAULT_DATE,
        default_args={'owner': 'owner1'})

    # E -> A -> B -> F -> A
    #        -> C -> F
    with dag:
        op1 = DummyOperator(task_id='A')
        op2 = DummyOperator(task_id='B')
        op3 = DummyOperator(task_id='C')
        op4 = DummyOperator(task_id='E')
        op5 = DummyOperator(task_id='F')
        op1.set_downstream(op2)
        op1.set_downstream(op3)
        op4.set_downstream(op1)
        op3.set_downstream(op5)
        op2.set_downstream(op5)
        op5.set_downstream(op1)

    with self.assertRaises(AirflowDagCycleException):
        self.assertFalse(test_cycle(dag))
Example #11
Source File: test_dag.py From airflow with Apache License 2.0 | 6 votes

def test_tree_view(self):
    """Verify correctness of dag.tree_view()."""
    with DAG("test_dag", start_date=DEFAULT_DATE) as dag:
        op1 = DummyOperator(task_id="t1")
        op2 = DummyOperator(task_id="t2")
        op3 = DummyOperator(task_id="t3")
        op1 >> op2 >> op3

        with redirect_stdout(io.StringIO()) as stdout:
            dag.tree_view()
            stdout = stdout.getvalue()

        stdout_lines = stdout.split("\n")
        self.assertIn('t1', stdout_lines[0])
        self.assertIn('t2', stdout_lines[1])
        self.assertIn('t3', stdout_lines[2])
Example #12
Source File: test_dag.py From airflow with Apache License 2.0 | 6 votes

def test_sync_to_db_default_view(self, mock_now):
    dag = DAG(
        'dag',
        start_date=DEFAULT_DATE,
        default_view="graph",
    )
    with dag:
        DummyOperator(task_id='task', owner='owner1')
        SubDagOperator(
            task_id='subtask',
            owner='owner2',
            subdag=DAG(
                'dag.subtask',
                start_date=DEFAULT_DATE,
            )
        )
    now = datetime.datetime.utcnow().replace(tzinfo=pendulum.timezone('UTC'))
    mock_now.return_value = now
    session = settings.Session()
    dag.sync_to_db(session=session)

    orm_dag = session.query(DagModel).filter(DagModel.dag_id == 'dag').one()
    self.assertIsNotNone(orm_dag.default_view)
    self.assertEqual(orm_dag.default_view, "graph")
    session.close()
Example #13
Source File: test_dag.py From airflow with Apache License 2.0 | 6 votes

def test_resolve_template_files_list(self):
    with NamedTemporaryFile(suffix='.template') as f:
        f.write(b'{{ ds }}')
        f.flush()
        template_dir = os.path.dirname(f.name)
        template_file = os.path.basename(f.name)

        with DAG('test-dag', start_date=DEFAULT_DATE, template_searchpath=template_dir):
            task = DummyOperator(task_id='op1')
            task.test_field = [template_file, 'some_string']
            task.template_fields = ('test_field',)
            task.template_ext = ('.template',)
            task.resolve_template_files()

    self.assertEqual(task.test_field, ['{{ ds }}', 'some_string'])
Example #14
Source File: test_dag.py From airflow with Apache License 2.0 | 6 votes

def test_resolve_template_files_value(self):
    with NamedTemporaryFile(suffix='.template') as f:
        f.write(b'{{ ds }}')
        f.flush()
        template_dir = os.path.dirname(f.name)
        template_file = os.path.basename(f.name)

        with DAG('test-dag', start_date=DEFAULT_DATE, template_searchpath=template_dir):
            task = DummyOperator(task_id='op1')
            task.test_field = template_file
            task.template_fields = ('test_field',)
            task.template_ext = ('.template',)
            task.resolve_template_files()

    self.assertEqual(task.test_field, '{{ ds }}')
Example #15
Source File: test_dag.py From airflow with Apache License 2.0 | 6 votes

def test_dag_task_priority_weight_total_using_absolute(self):
    # Same test as above except use 'absolute' for weight calculation
    weight = 10
    width = 5
    depth = 5
    with DAG('dag', start_date=DEFAULT_DATE,
             default_args={'owner': 'owner1'}) as dag:
        pipeline = [
            [DummyOperator(
                task_id='stage{}.{}'.format(i, j), priority_weight=weight,
                weight_rule=WeightRule.ABSOLUTE)
                for j in range(0, width)] for i in range(0, depth)]
        for i, stage in enumerate(pipeline):
            if i == 0:
                continue
            for current_task in stage:
                for prev_task in pipeline[i - 1]:
                    current_task.set_upstream(prev_task)

        for task in dag.task_dict.values():
            # the sum of each stages after this task + itself
            correct_weight = weight
            calculated_weight = task.priority_weight_total
            self.assertEqual(calculated_weight, correct_weight)
Example #16
Source File: test_cleartasks.py From airflow with Apache License 2.0 | 6 votes

def test_operator_clear(self):
    dag = DAG('test_operator_clear', start_date=DEFAULT_DATE,
              end_date=DEFAULT_DATE + datetime.timedelta(days=10))
    op1 = DummyOperator(task_id='bash_op', owner='test', dag=dag)
    op2 = DummyOperator(task_id='dummy_op', owner='test', dag=dag, retries=1)
    op2.set_upstream(op1)

    ti1 = TI(task=op1, execution_date=DEFAULT_DATE)
    ti2 = TI(task=op2, execution_date=DEFAULT_DATE)
    ti2.run()
    # Dependency not met
    self.assertEqual(ti2.try_number, 1)
    self.assertEqual(ti2.max_tries, 1)

    op2.clear(upstream=True)
    ti1.run()
    ti2.run()
    self.assertEqual(ti1.try_number, 2)
    # max_tries is 0 because there is no task instance in db for ti1
    # so clear won't change the max_tries.
    self.assertEqual(ti1.max_tries, 0)
    self.assertEqual(ti2.try_number, 2)
    # try_number (0) + retries(1)
    self.assertEqual(ti2.max_tries, 1)
Example #17
Source File: test_cleartasks.py From airflow with Apache License 2.0 | 6 votes

def test_clear_task_instances_without_dag(self):
    dag = DAG('test_clear_task_instances_without_dag', start_date=DEFAULT_DATE,
              end_date=DEFAULT_DATE + datetime.timedelta(days=10))
    task0 = DummyOperator(task_id='task_0', owner='test', dag=dag)
    task1 = DummyOperator(task_id='task_1', owner='test', dag=dag, retries=2)
    ti0 = TI(task=task0, execution_date=DEFAULT_DATE)
    ti1 = TI(task=task1, execution_date=DEFAULT_DATE)
    ti0.run()
    ti1.run()

    with create_session() as session:
        qry = session.query(TI).filter(
            TI.dag_id == dag.dag_id).all()
        clear_task_instances(qry, session)

    # When dag is None, max_tries will be maximum of original max_tries or try_number.
    ti0.refresh_from_db()
    ti1.refresh_from_db()
    # Next try to run will be try 2
    self.assertEqual(ti0.try_number, 2)
    self.assertEqual(ti0.max_tries, 1)
    self.assertEqual(ti1.try_number, 2)
    self.assertEqual(ti1.max_tries, 2)
Example #18
Source File: test_pool.py From airflow with Apache License 2.0 | 6 votes

def test_default_pool_open_slots(self):
    set_default_pool_slots(5)
    self.assertEqual(5, Pool.get_default_pool().open_slots())

    dag = DAG(
        dag_id='test_default_pool_open_slots',
        start_date=DEFAULT_DATE, )
    op1 = DummyOperator(task_id='dummy1', dag=dag)
    op2 = DummyOperator(task_id='dummy2', dag=dag, pool_slots=2)
    ti1 = TI(task=op1, execution_date=DEFAULT_DATE)
    ti2 = TI(task=op2, execution_date=DEFAULT_DATE)
    ti1.state = State.RUNNING
    ti2.state = State.QUEUED

    session = settings.Session
    session.add(ti1)
    session.add(ti2)
    session.commit()
    session.close()

    self.assertEqual(2, Pool.get_default_pool().open_slots())
Example #19
Source File: test_timestamp.py From airflow with Apache License 2.0 | 5 votes

def add_log(execdate, session, timezone_override=None):
    dag = DAG(dag_id='logging', default_args={'start_date': execdate})
    task = DummyOperator(task_id='dummy', dag=dag, owner='airflow')
    task_instance = TaskInstance(task=task, execution_date=execdate, state='success')
    session.merge(task_instance)
    log = Log(State.RUNNING, task_instance)
    if timezone_override:
        log.dttm = log.dttm.astimezone(timezone_override)
    session.add(log)
    session.commit()
    return log
Example #20
Source File: test_dag.py From airflow with Apache License 2.0 | 5 votes

def test_dag_task_priority_weight_total(self):
    width = 5
    depth = 5
    weight = 5
    pattern = re.compile('stage(\\d*).(\\d*)')
    # Fully connected parallel tasks. i.e. every task at each parallel
    # stage is dependent on every task in the previous stage.
    # Default weight should be calculated using downstream descendants
    with DAG('dag', start_date=DEFAULT_DATE,
             default_args={'owner': 'owner1'}) as dag:
        pipeline = [
            [DummyOperator(
                task_id='stage{}.{}'.format(i, j), priority_weight=weight)
                for j in range(0, width)] for i in range(0, depth)]
        for i, stage in enumerate(pipeline):
            if i == 0:
                continue
            for current_task in stage:
                for prev_task in pipeline[i - 1]:
                    current_task.set_upstream(prev_task)

        for task in dag.task_dict.values():
            match = pattern.match(task.task_id)
            task_depth = int(match.group(1))
            # the sum of each stages after this task + itself
            correct_weight = ((depth - (task_depth + 1)) * width + 1) * weight

            calculated_weight = task.priority_weight_total
            self.assertEqual(calculated_weight, correct_weight)
Example #21
Source File: test_dag.py From airflow with Apache License 2.0 | 5 votes

def test_dag_topological_sort2(self):
    dag = DAG(
        'dag',
        start_date=DEFAULT_DATE,
        default_args={'owner': 'owner1'})

    # C -> (A u B) -> D
    # C -> E
    # ordered: E | D, A | B, C
    with dag:
        op1 = DummyOperator(task_id='A')
        op2 = DummyOperator(task_id='B')
        op3 = DummyOperator(task_id='C')
        op4 = DummyOperator(task_id='D')
        op5 = DummyOperator(task_id='E')
        op1.set_downstream(op3)
        op2.set_downstream(op3)
        op1.set_upstream(op4)
        op2.set_upstream(op4)
        op5.set_downstream(op3)

    topological_list = dag.topological_sort()
    logging.info(topological_list)

    set1 = [op4, op5]
    self.assertTrue(topological_list[0] in set1)
    set1.remove(topological_list[0])

    set2 = [op1, op2]
    set2.extend(set1)
    self.assertTrue(topological_list[1] in set2)
    set2.remove(topological_list[1])

    self.assertTrue(topological_list[2] in set2)
    set2.remove(topological_list[2])

    self.assertTrue(topological_list[3] in set2)

    self.assertTrue(topological_list[4] == op3)
Example #22
Source File: test_dag.py From airflow with Apache License 2.0 | 5 votes

def test_dag_topological_sort1(self):
    dag = DAG(
        'dag',
        start_date=DEFAULT_DATE,
        default_args={'owner': 'owner1'})

    # A -> B
    # A -> C -> D
    # ordered: B, D, C, A or D, B, C, A or D, C, B, A
    with dag:
        op1 = DummyOperator(task_id='A')
        op2 = DummyOperator(task_id='B')
        op3 = DummyOperator(task_id='C')
        op4 = DummyOperator(task_id='D')
        op1.set_upstream([op2, op3])
        op3.set_upstream(op4)

    topological_list = dag.topological_sort()
    logging.info(topological_list)

    tasks = [op2, op3, op4]
    self.assertTrue(topological_list[0] in tasks)
    tasks.remove(topological_list[0])
    self.assertTrue(topological_list[1] in tasks)
    tasks.remove(topological_list[1])
    self.assertTrue(topological_list[2] in tasks)
    tasks.remove(topological_list[2])
    self.assertTrue(topological_list[3] == op1)
Example #23
Source File: test_dag.py From airflow with Apache License 2.0 | 5 votes

def test_duplicate_task_ids_not_allowed_without_dag_context_manager(self):
    """Verify that tasks with a duplicate task_id raise an error."""
    with self.assertRaisesRegex(
        DuplicateTaskIdFound, "Task id 't1' has already been added to the DAG"
    ):
        dag = DAG("test_dag", start_date=DEFAULT_DATE)
        op1 = DummyOperator(task_id="t1", dag=dag)
        op2 = DummyOperator(task_id="t1", dag=dag)
        op1 >> op2

    self.assertEqual(dag.task_dict, {op1.task_id: op1})
Example #24
Source File: test_cleartasks.py From airflow with Apache License 2.0 | 5 votes

def test_dag_clear(self):
    dag = DAG('test_dag_clear', start_date=DEFAULT_DATE,
              end_date=DEFAULT_DATE + datetime.timedelta(days=10))
    task0 = DummyOperator(task_id='test_dag_clear_task_0', owner='test', dag=dag)
    ti0 = TI(task=task0, execution_date=DEFAULT_DATE)
    # Next try to run will be try 1
    self.assertEqual(ti0.try_number, 1)
    ti0.run()
    self.assertEqual(ti0.try_number, 2)
    dag.clear()
    ti0.refresh_from_db()
    self.assertEqual(ti0.try_number, 2)
    self.assertEqual(ti0.state, State.NONE)
    self.assertEqual(ti0.max_tries, 1)

    task1 = DummyOperator(task_id='test_dag_clear_task_1', owner='test',
                          dag=dag, retries=2)
    ti1 = TI(task=task1, execution_date=DEFAULT_DATE)
    self.assertEqual(ti1.max_tries, 2)
    ti1.try_number = 1
    # Next try will be 2
    ti1.run()
    self.assertEqual(ti1.try_number, 3)
    self.assertEqual(ti1.max_tries, 2)

    dag.clear()
    ti0.refresh_from_db()
    ti1.refresh_from_db()
    # after clearing the dag, ti1 should show attempt 3 of 5
    self.assertEqual(ti1.max_tries, 4)
    self.assertEqual(ti1.try_number, 3)
    # after clearing the dag, ti0 should show attempt 2 of 2
    self.assertEqual(ti0.try_number, 2)
    self.assertEqual(ti0.max_tries, 1)
Example #25
Source File: test_cleartasks.py From airflow with Apache License 2.0 | 5 votes

def test_clear_task_instances_without_task(self):
    dag = DAG('test_clear_task_instances_without_task', start_date=DEFAULT_DATE,
              end_date=DEFAULT_DATE + datetime.timedelta(days=10))
    task0 = DummyOperator(task_id='task0', owner='test', dag=dag)
    task1 = DummyOperator(task_id='task1', owner='test', dag=dag, retries=2)
    ti0 = TI(task=task0, execution_date=DEFAULT_DATE)
    ti1 = TI(task=task1, execution_date=DEFAULT_DATE)
    ti0.run()
    ti1.run()

    # Remove the task from dag.
    dag.task_dict = {}
    self.assertFalse(dag.has_task(task0.task_id))
    self.assertFalse(dag.has_task(task1.task_id))

    with create_session() as session:
        qry = session.query(TI).filter(
            TI.dag_id == dag.dag_id).all()
        clear_task_instances(qry, session)

    # When dag is None, max_tries will be maximum of original max_tries or try_number.
    ti0.refresh_from_db()
    ti1.refresh_from_db()
    # Next try to run will be try 2
    self.assertEqual(ti0.try_number, 2)
    self.assertEqual(ti0.max_tries, 1)
    self.assertEqual(ti1.try_number, 2)
    self.assertEqual(ti1.max_tries, 2)
Example #26
Source File: test_not_previously_skipped_dep.py From airflow with Apache License 2.0 | 5 votes

def test_no_parent():
    """
    A simple DAG with a single task. NotPreviouslySkippedDep is met.
    """
    start_date = pendulum.datetime(2020, 1, 1)
    dag = DAG("test_test_no_parent_dag", schedule_interval=None, start_date=start_date)
    op1 = DummyOperator(task_id="op1", dag=dag)
    ti1 = TaskInstance(op1, start_date)

    with create_session() as session:
        dep = NotPreviouslySkippedDep()
        assert len(list(dep.get_dep_statuses(ti1, session, DepContext()))) == 0
        assert dep.is_met(ti1, session)
        assert ti1.state != State.SKIPPED
Example #27
Source File: test_baseoperator.py From airflow with Apache License 2.0 | 5 votes

def test_set_xcomargs_dependencies_works_when_set_after_init(self):
    with DAG(dag_id='xcomargs_test', default_args={"start_date": datetime.today()}):
        op1 = DummyOperator(task_id="op1")
        op2 = CustomOp(task_id="op2")
        op2.field = op1.output  # value is set after init

    assert op1 in op2.upstream_list
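Examples #27 and #29 both reference CustomOp, which is defined elsewhere in test_baseoperator.py and is not shown in this excerpt. A plausible minimal sketch follows, assuming CustomOp is simply a DummyOperator subclass whose field attribute is registered in template_fields so that assigning an XComArg to it creates the upstream dependency; the exact definition in the test suite may differ.

# Hypothetical reconstruction; the real definition lives in test_baseoperator.py.
class CustomOp(DummyOperator):
    template_fields = ("field",)

    def __init__(self, field=None, **kwargs):
        super().__init__(**kwargs)
        self.field = field  # assigning an XComArg here wires an upstream dependency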
Example #28
Source File: test_pool.py From airflow with Apache License 2.0 | 5 votes

def test_open_slots(self):
    pool = Pool(pool='test_pool', slots=5)
    dag = DAG(
        dag_id='test_open_slots',
        start_date=DEFAULT_DATE, )
    op1 = DummyOperator(task_id='dummy1', dag=dag, pool='test_pool')
    op2 = DummyOperator(task_id='dummy2', dag=dag, pool='test_pool')
    ti1 = TI(task=op1, execution_date=DEFAULT_DATE)
    ti2 = TI(task=op2, execution_date=DEFAULT_DATE)
    ti1.state = State.RUNNING
    ti2.state = State.QUEUED

    session = settings.Session
    session.add(pool)
    session.add(ti1)
    session.add(ti2)
    session.commit()
    session.close()

    self.assertEqual(3, pool.open_slots())  # pylint: disable=no-value-for-parameter
    self.assertEqual(1, pool.running_slots())  # pylint: disable=no-value-for-parameter
    self.assertEqual(1, pool.queued_slots())  # pylint: disable=no-value-for-parameter
    self.assertEqual(2, pool.occupied_slots())  # pylint: disable=no-value-for-parameter
    self.assertEqual({
        "default_pool": {
            "open": 128,
            "queued": 0,
            "total": 128,
            "running": 0,
        },
        "test_pool": {
            "open": 3,
            "queued": 1,
            "running": 1,
            "total": 5,
        },
    }, pool.slots_stats())
Example #29
Source File: test_baseoperator.py From airflow with Apache License 2.0 | 5 votes

def test_set_xcomargs_dependencies_error_when_outside_dag(self):
    with pytest.raises(AirflowException):
        op1 = DummyOperator(task_id="op1")
        CustomOp(task_id="op2", field=op1.output)
Example #30
Source File: test_dagrun.py From airflow with Apache License 2.0 | 5 votes

def test_dagrun_success_callback(self):
    def on_success_callable(context):
        self.assertEqual(
            context['dag_run'].dag_id,
            'test_dagrun_success_callback'
        )

    dag = DAG(
        dag_id='test_dagrun_success_callback',
        start_date=datetime.datetime(2017, 1, 1),
        on_success_callback=on_success_callable,
    )
    dag_task1 = DummyOperator(
        task_id='test_state_succeeded1',
        dag=dag)
    dag_task2 = DummyOperator(
        task_id='test_state_succeeded2',
        dag=dag)
    dag_task1.set_downstream(dag_task2)

    initial_task_states = {
        'test_state_succeeded1': State.SUCCESS,
        'test_state_succeeded2': State.SUCCESS,
    }

    dag_run = self.create_dag_run(dag=dag,
                                  state=State.RUNNING,
                                  task_states=initial_task_states)
    dag_run.update_state()
    self.assertEqual(State.SUCCESS, dag_run.state)