Python airflow.models.DAG Examples

The following are 30 code examples of airflow.models.DAG(), all taken from the Apache Airflow test suite; the source file and license for each example are noted above it. You may also want to check out the other functions and classes available in the airflow.models module.
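Before the examples, a minimal standalone sketch of constructing a DAG and binding tasks to it may help orient the reader (a sketch only, assuming an Airflow 2.0-era API matching the tests below; the dag_id, dates, and task ids are placeholders):

import datetime

from airflow.models import DAG
from airflow.operators.dummy_operator import DummyOperator  # airflow.operators.dummy in newer releases

# The two arguments that appear in virtually every example below:
# a unique dag_id and a start_date.
dag = DAG(
    dag_id='example_dag',
    start_date=datetime.datetime(2020, 1, 1),
    schedule_interval='@once',
)

# Tasks can be bound to a DAG explicitly via the dag= argument...
op1 = DummyOperator(task_id='first', dag=dag)

# ...or implicitly, by instantiating them inside a `with DAG(...)` block,
# as several examples below do.
with DAG('example_dag_ctx', start_date=datetime.datetime(2020, 1, 1)):
    op2 = DummyOperator(task_id='second')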
Example #1
Source File: test_pool.py    From airflow with Apache License 2.0
def test_infinite_slots(self):
        pool = Pool(pool='test_pool', slots=-1)
        dag = DAG(
            dag_id='test_infinite_slots',
            start_date=DEFAULT_DATE,
        )
        op1 = DummyOperator(task_id='dummy1', dag=dag, pool='test_pool')
        op2 = DummyOperator(task_id='dummy2', dag=dag, pool='test_pool')
        ti1 = TI(task=op1, execution_date=DEFAULT_DATE)
        ti2 = TI(task=op2, execution_date=DEFAULT_DATE)
        ti1.state = State.RUNNING
        ti2.state = State.QUEUED

        session = settings.Session
        session.add(pool)
        session.add(ti1)
        session.add(ti2)
        session.commit()
        session.close()

        self.assertEqual(float('inf'), pool.open_slots())  # pylint: disable=no-value-for-parameter
        self.assertEqual(1, pool.running_slots())  # pylint: disable=no-value-for-parameter
        self.assertEqual(1, pool.queued_slots())  # pylint: disable=no-value-for-parameter
        self.assertEqual(2, pool.occupied_slots())  # pylint: disable=no-value-for-parameter 
Example #2
Source File: test_cloud_storage_transfer_service.py    From airflow with Apache License 2.0
def test_templates(self, _):
        dag_id = 'test_dag_id'
        args = {'start_date': DEFAULT_DATE}
        self.dag = DAG(dag_id, default_args=args)  # pylint: disable=attribute-defined-outside-init
        op = CloudDataTransferServiceGCSToGCSOperator(
            source_bucket='{{ dag.dag_id }}',
            destination_bucket='{{ dag.dag_id }}',
            description='{{ dag.dag_id }}',
            object_conditions={'exclude_prefixes': ['{{ dag.dag_id }}']},
            gcp_conn_id='{{ dag.dag_id }}',
            task_id=TASK_ID,
            dag=self.dag,
        )
        ti = TaskInstance(op, DEFAULT_DATE)
        ti.render_templates()
        self.assertEqual(dag_id, getattr(op, 'source_bucket'))
        self.assertEqual(dag_id, getattr(op, 'destination_bucket'))
        self.assertEqual(dag_id, getattr(op, 'description'))

        # pylint: disable=unsubscriptable-object
        self.assertEqual(dag_id, getattr(op, 'object_conditions')['exclude_prefixes'][0])
        # pylint: enable=unsubscriptable-object

        self.assertEqual(dag_id, getattr(op, 'gcp_conn_id')) 
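All of the *_with_templates tests in this collection (Examples #2, #5, #12-#14, #17, #20, #21, #23, #24) exercise the same mechanism: any attribute named in an operator's template_fields tuple is rendered through Jinja before execution. Here is a minimal sketch with a hypothetical EchoOperator (not a real Airflow operator), rendered against an explicit context the way Example #27 does so that no metadata database is needed (it assumes Airflow 2.0, where apply_defaults is applied automatically):

import datetime

from airflow.models import DAG, BaseOperator


class EchoOperator(BaseOperator):
    # Listing 'message' here is what makes its '{{ ... }}' value render.
    template_fields = ('message',)

    def __init__(self, message, **kwargs):
        super().__init__(**kwargs)
        self.message = message

    def execute(self, context):
        return self.message


dag = DAG('template_sketch', start_date=datetime.datetime(2020, 1, 1))
op = EchoOperator(task_id='echo', message='{{ dag.dag_id }}', dag=dag)

# Render against an explicit context (cf. Example #27); the templated
# attribute is overwritten in place with its rendered value.
op.render_template_fields(context={'dag': dag})
assert op.message == 'template_sketch'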
Example #3
Source File: test_trigger_dag.py    From airflow with Apache License 2.0
def test_trigger_dag_with_valid_start_date(self, dag_bag_mock):
        dag_id = "trigger_dag_with_valid_start_date"
        dag = DAG(dag_id, default_args={'start_date': timezone.datetime(2016, 9, 5, 10, 10, 0)})
        dag_bag_mock.dags = [dag_id]
        dag_bag_mock.get_dag.return_value = dag
        dag_run = DagRun()

        triggers = _trigger_dag(
            dag_id,
            dag_bag_mock,
            dag_run,
            run_id=None,
            conf=None,
            execution_date=timezone.datetime(2018, 7, 5, 10, 10, 0),
            replace_microseconds=True,
        )

        assert len(triggers) == 1 
Example #4
Source File: test_trigger_dag.py    From airflow with Apache License 2.0
def test_trigger_dag_with_dict_conf(self, dag_bag_mock):
        dag_id = "trigger_dag_with_dict_conf"
        dag = DAG(dag_id)
        dag_bag_mock.dags = [dag_id]
        dag_bag_mock.get_dag.return_value = dag
        conf = dict(foo="bar")
        dag_run = DagRun()
        triggers = _trigger_dag(
            dag_id,
            dag_bag_mock,
            dag_run,
            run_id=None,
            conf=conf,
            execution_date=None,
            replace_microseconds=True)

        self.assertEqual(triggers[0].conf, conf) 
Example #5
Source File: test_compute.py    From airflow with Apache License 2.0
def test_set_machine_type_with_templates(self, _):
        dag_id = 'test_dag_id'
        args = {
            'start_date': DEFAULT_DATE
        }
        self.dag = DAG(dag_id, default_args=args)  # pylint: disable=attribute-defined-outside-init
        op = ComputeEngineSetMachineTypeOperator(
            project_id='{{ dag.dag_id }}',
            zone='{{ dag.dag_id }}',
            resource_id='{{ dag.dag_id }}',
            body={},
            gcp_conn_id='{{ dag.dag_id }}',
            api_version='{{ dag.dag_id }}',
            task_id='id',
            dag=self.dag
        )
        ti = TaskInstance(op, DEFAULT_DATE)
        ti.render_templates()
        self.assertEqual(dag_id, getattr(op, 'project_id'))
        self.assertEqual(dag_id, getattr(op, 'zone'))
        self.assertEqual(dag_id, getattr(op, 'resource_id'))
        self.assertEqual(dag_id, getattr(op, 'gcp_conn_id'))
        self.assertEqual(dag_id, getattr(op, 'api_version')) 
Example #6
Source File: test_sqlalchemy.py    From airflow with Apache License 2.0
def test_process_bind_param_naive(self):
        """
        Check that naive datetimes are prevented from being saved to the db
        """
        dag_id = 'test_process_bind_param_naive'

        # naive
        start_date = datetime.datetime.now()
        dag = DAG(dag_id=dag_id, start_date=start_date)
        dag.clear()

        with self.assertRaises((ValueError, StatementError)):
            dag.create_dagrun(
                run_id=start_date.isoformat(),
                state=State.NONE,
                execution_date=start_date,
                start_date=start_date,
                session=self.session
            )
        dag.clear() 
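The complement to the naive-datetime check above: building execution dates with airflow.utils.timezone, as Example #3 does, yields timezone-aware values that pass this validation. A small sketch of the difference:

import datetime

from airflow.utils import timezone

# Naive: no tzinfo, and rejected by the validation exercised above.
naive = datetime.datetime(2016, 9, 5, 10, 10, 0)
assert naive.tzinfo is None

# Aware: timezone.datetime attaches UTC, so values built this way are
# safe to use as start_date or execution_date (cf. Example #3).
aware = timezone.datetime(2016, 9, 5, 10, 10, 0)
assert aware.tzinfo is not None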
Example #7
Source File: test_trigger_dag.py    From airflow with Apache License 2.0
def test_trigger_dag_with_str_conf(self, dag_bag_mock):
        dag_id = "trigger_dag_with_str_conf"
        dag = DAG(dag_id)
        dag_bag_mock.dags = [dag_id]
        dag_bag_mock.get_dag.return_value = dag
        conf = "{\"foo\": \"bar\"}"
        dag_run = DagRun()
        triggers = _trigger_dag(
            dag_id,
            dag_bag_mock,
            dag_run,
            run_id=None,
            conf=conf,
            execution_date=None,
            replace_microseconds=True)

        self.assertEqual(triggers[0].conf, json.loads(conf)) 
Example #8
Source File: test_datasync.py    From airflow with Apache License 2.0
def setUp(self):
        args = {
            "owner": "airflow",
            "start_date": DEFAULT_DATE,
        }

        self.dag = DAG(
            TEST_DAG_ID + "test_schedule_dag_once",
            default_args=args,
            schedule_interval="@once",
        )

        self.client = boto3.client("datasync", region_name="us-east-1")

        self.source_location_arn = self.client.create_location_smb(
            **MOCK_DATA["create_source_location_kwargs"]
        )["LocationArn"]
        self.destination_location_arn = self.client.create_location_s3(
            **MOCK_DATA["create_destination_location_kwargs"]
        )["LocationArn"]
        self.task_arn = self.client.create_task(
            SourceLocationArn=self.source_location_arn,
            DestinationLocationArn=self.destination_location_arn,
        )["TaskArn"] 
Example #9
Source File: test_trigger_dag.py    From airflow with Apache License 2.0
def test_trigger_dag_dag_run_exist(self, dag_bag_mock, dag_run_mock):
        dag_id = "dag_run_exist"
        dag = DAG(dag_id)
        dag_bag_mock.dags = [dag_id]
        dag_bag_mock.get_dag.return_value = dag
        dag_run_mock.find.return_value = DagRun()
        self.assertRaises(
            AirflowException,
            _trigger_dag,
            dag_id,
            dag_bag_mock,
            dag_run_mock,
            run_id=None,
            conf=None,
            execution_date=None,
            replace_microseconds=True,
        ) 
Example #10
Source File: test_dag.py    From airflow with Apache License 2.0
def test_dag_task_priority_weight_total_using_absolute(self):
        # Build a width x depth grid of tasks and verify totals under the 'absolute' weight rule
        weight = 10
        width = 5
        depth = 5
        with DAG('dag', start_date=DEFAULT_DATE,
                 default_args={'owner': 'owner1'}) as dag:
            pipeline = [
                [DummyOperator(
                    task_id='stage{}.{}'.format(i, j), priority_weight=weight,
                    weight_rule=WeightRule.ABSOLUTE)
                    for j in range(0, width)] for i in range(0, depth)
            ]
            for i, stage in enumerate(pipeline):
                if i == 0:
                    continue
                for current_task in stage:
                    for prev_task in pipeline[i - 1]:
                        current_task.set_upstream(prev_task)

            for task in dag.task_dict.values():
                # with WeightRule.ABSOLUTE, the total is simply the task's own weight
                correct_weight = weight
                calculated_weight = task.priority_weight_total
                self.assertEqual(calculated_weight, correct_weight) 
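For contrast with the absolute rule tested above, here is a minimal sketch of how priority_weight_total differs between the downstream and absolute weight rules (a sketch, assuming WeightRule from airflow.utils.weight_rule; no database is needed because the total is computed from the in-memory DAG structure):

import datetime

from airflow.models import DAG
from airflow.operators.dummy_operator import DummyOperator
from airflow.utils.weight_rule import WeightRule

with DAG('weight_rule_sketch', start_date=datetime.datetime(2020, 1, 1)):
    up = DummyOperator(task_id='up', priority_weight=10,
                       weight_rule=WeightRule.DOWNSTREAM)
    down = DummyOperator(task_id='down', priority_weight=10,
                         weight_rule=WeightRule.ABSOLUTE)
    up >> down

# DOWNSTREAM (the default): own weight plus the weights of all downstream
# tasks, so tasks closer to the start of the DAG get scheduled first.
assert up.priority_weight_total == 20
# ABSOLUTE: the total is just the task's own priority_weight, which is why
# every task in the test above has the same total.
assert down.priority_weight_total == 10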
Example #11
Source File: test_s3_to_sftp.py    From airflow with Apache License 2.0
def setUp(self):
        from airflow.providers.ssh.hooks.ssh import SSHHook
        from airflow.providers.amazon.aws.hooks.s3 import S3Hook

        hook = SSHHook(ssh_conn_id='ssh_default')
        s3_hook = S3Hook('aws_default')
        hook.no_host_key_check = True
        args = {
            'owner': 'airflow',
            'start_date': DEFAULT_DATE,
        }
        dag = DAG(TEST_DAG_ID + 'test_schedule_dag_once', default_args=args)
        dag.schedule_interval = '@once'

        self.hook = hook
        self.s3_hook = s3_hook

        self.ssh_client = self.hook.get_conn()
        self.sftp_client = self.ssh_client.open_sftp()

        self.dag = dag
        self.s3_bucket = BUCKET
        self.sftp_path = SFTP_PATH
        self.s3_key = S3_KEY 
Example #12
Source File: test_compute.py    From airflow with Apache License 2.0
def test_instance_stop_with_templates(self, _):
        dag_id = 'test_dag_id'
        args = {
            'start_date': DEFAULT_DATE
        }
        self.dag = DAG(dag_id, default_args=args)  # pylint: disable=attribute-defined-outside-init
        op = ComputeEngineStopInstanceOperator(
            project_id='{{ dag.dag_id }}',
            zone='{{ dag.dag_id }}',
            resource_id='{{ dag.dag_id }}',
            gcp_conn_id='{{ dag.dag_id }}',
            api_version='{{ dag.dag_id }}',
            task_id='id',
            dag=self.dag
        )
        ti = TaskInstance(op, DEFAULT_DATE)
        ti.render_templates()
        self.assertEqual(dag_id, getattr(op, 'project_id'))
        self.assertEqual(dag_id, getattr(op, 'zone'))
        self.assertEqual(dag_id, getattr(op, 'resource_id'))
        self.assertEqual(dag_id, getattr(op, 'gcp_conn_id'))
        self.assertEqual(dag_id, getattr(op, 'api_version')) 
Example #13
Source File: test_compute.py    From airflow with Apache License 2.0
def test_instance_start_with_templates(self, _):
        dag_id = 'test_dag_id'
        args = {
            'start_date': DEFAULT_DATE
        }
        self.dag = DAG(dag_id, default_args=args)  # pylint: disable=attribute-defined-outside-init
        op = ComputeEngineStartInstanceOperator(
            project_id='{{ dag.dag_id }}',
            zone='{{ dag.dag_id }}',
            resource_id='{{ dag.dag_id }}',
            gcp_conn_id='{{ dag.dag_id }}',
            api_version='{{ dag.dag_id }}',
            task_id='id',
            dag=self.dag
        )
        ti = TaskInstance(op, DEFAULT_DATE)
        ti.render_templates()
        self.assertEqual(dag_id, getattr(op, 'project_id'))
        self.assertEqual(dag_id, getattr(op, 'zone'))
        self.assertEqual(dag_id, getattr(op, 'resource_id'))
        self.assertEqual(dag_id, getattr(op, 'gcp_conn_id'))
        self.assertEqual(dag_id, getattr(op, 'api_version')) 
Example #14
Source File: test_cloud_storage_transfer_service.py    From airflow with Apache License 2.0
def test_templates(self, _):
        dag_id = 'test_dag_id'
        args = {'start_date': DEFAULT_DATE}
        self.dag = DAG(dag_id, default_args=args)  # pylint: disable=attribute-defined-outside-init
        op = CloudDataTransferServiceS3ToGCSOperator(
            s3_bucket='{{ dag.dag_id }}',
            gcs_bucket='{{ dag.dag_id }}',
            description='{{ dag.dag_id }}',
            object_conditions={'exclude_prefixes': ['{{ dag.dag_id }}']},
            gcp_conn_id='{{ dag.dag_id }}',
            task_id=TASK_ID,
            dag=self.dag,
        )
        ti = TaskInstance(op, DEFAULT_DATE)
        ti.render_templates()
        self.assertEqual(dag_id, getattr(op, 's3_bucket'))
        self.assertEqual(dag_id, getattr(op, 'gcs_bucket'))
        self.assertEqual(dag_id, getattr(op, 'description'))

        # pylint: disable=unsubscriptable-object
        self.assertEqual(dag_id, getattr(op, 'object_conditions')['exclude_prefixes'][0])
        # pylint: enable=unsubscriptable-object

        self.assertEqual(dag_id, getattr(op, 'gcp_conn_id')) 
Example #15
Source File: test_dag.py    From airflow with Apache License 2.0
def test_params_passed_and_params_in_default_args_no_override(self):
        """
        Test that when 'params' is passed both through the default_args dict
        and explicitly as a DAG argument, the 'params' key of default_args is
        merged with the explicit params dict.
        """
        params1 = {'parameter1': 1}
        params2 = {'parameter2': 2}

        dag = models.DAG('test-dag',
                         default_args={'params': params1},
                         params=params2)

        params_combined = params1.copy()
        params_combined.update(params2)
        self.assertEqual(params_combined, dag.params) 
Example #16
Source File: test_qubole.py    From airflow with Apache License 2.0
def test_get_redirect_url(self):
        dag = DAG(DAG_ID, start_date=DEFAULT_DATE)

        with dag:
            task = QuboleOperator(task_id=TASK_ID,
                                  qubole_conn_id=TEST_CONN,
                                  command_type='shellcmd',
                                  parameters="param1 param2",
                                  dag=dag)

        ti = TaskInstance(task=task, execution_date=DEFAULT_DATE)
        ti.xcom_push('qbol_cmd_id', 12345)

        # check for positive case
        url = task.get_extra_links(DEFAULT_DATE, 'Go to QDS')
        self.assertEqual(url, 'http://localhost/v2/analyze?command_id=12345')

        # check for negative case
        url2 = task.get_extra_links(datetime(2017, 1, 2), 'Go to QDS')
        self.assertEqual(url2, '') 
Example #17
Source File: test_cloud_storage_transfer_service.py    From airflow with Apache License 2.0
def test_operation_resume_with_templates(self, _):
        dag_id = 'test_dag_id'
        args = {'start_date': DEFAULT_DATE}
        self.dag = DAG(dag_id, default_args=args)  # pylint: disable=attribute-defined-outside-init
        op = CloudDataTransferServiceResumeOperationOperator(
            operation_name='{{ dag.dag_id }}',
            gcp_conn_id='{{ dag.dag_id }}',
            api_version='{{ dag.dag_id }}',
            task_id=TASK_ID,
            dag=self.dag,
        )
        ti = TaskInstance(op, DEFAULT_DATE)
        ti.render_templates()
        self.assertEqual(dag_id, getattr(op, 'operation_name'))
        self.assertEqual(dag_id, getattr(op, 'gcp_conn_id'))
        self.assertEqual(dag_id, getattr(op, 'api_version')) 
Example #18
Source File: test_pool.py    From airflow with Apache License 2.0
def test_default_pool_open_slots(self):
        set_default_pool_slots(5)
        self.assertEqual(5, Pool.get_default_pool().open_slots())

        dag = DAG(
            dag_id='test_default_pool_open_slots',
            start_date=DEFAULT_DATE,
        )
        op1 = DummyOperator(task_id='dummy1', dag=dag)
        op2 = DummyOperator(task_id='dummy2', dag=dag, pool_slots=2)
        ti1 = TI(task=op1, execution_date=DEFAULT_DATE)
        ti2 = TI(task=op2, execution_date=DEFAULT_DATE)
        ti1.state = State.RUNNING
        ti2.state = State.QUEUED

        session = settings.Session
        session.add(ti1)
        session.add(ti2)
        session.commit()
        session.close()

        self.assertEqual(2, Pool.get_default_pool().open_slots()) 
Example #19
Source File: test_qubole.py    From airflow with Apache License 2.0
def test_partition_sensor_error(self, patched_poke):
        patched_poke.return_value = True

        dag = DAG(DAG_ID, start_date=DEFAULT_DATE)

        with self.assertRaises(AirflowException):
            QubolePartitionSensor(
                task_id='test_qubole_partition_sensor',
                poke_interval=1,
                data={
                    "schema": "default",
                    "table": "my_partitioned_table",
                    "columns": [{"column": "month", "values": ["1", "2"]}]
                },
                dag=dag
            ) 
Example #20
Source File: test_cloud_storage_transfer_service.py    From airflow with Apache License 2.0
def test_operation_pause_with_templates(self, _):
        dag_id = 'test_dag_id'
        args = {'start_date': DEFAULT_DATE}
        self.dag = DAG(dag_id, default_args=args)  # pylint: disable=attribute-defined-outside-init
        op = CloudDataTransferServicePauseOperationOperator(
            operation_name='{{ dag.dag_id }}',
            gcp_conn_id='{{ dag.dag_id }}',
            api_version='{{ dag.dag_id }}',
            task_id=TASK_ID,
            dag=self.dag,
        )
        ti = TaskInstance(op, DEFAULT_DATE)
        ti.render_templates()
        self.assertEqual(dag_id, getattr(op, 'operation_name'))
        self.assertEqual(dag_id, getattr(op, 'gcp_conn_id'))
        self.assertEqual(dag_id, getattr(op, 'api_version')) 
Example #21
Source File: test_cloud_storage_transfer_service.py    From airflow with Apache License 2.0
def test_templates(self, _):
        dag_id = 'test_dag_id'
        args = {'start_date': DEFAULT_DATE}
        self.dag = DAG(dag_id, default_args=args)  # pylint: disable=attribute-defined-outside-init
        op = CloudDataTransferServiceListOperationsOperator(
            request_filter={"job_names": ['{{ dag.dag_id }}']},
            gcp_conn_id='{{ dag.dag_id }}',
            task_id='task-id',
            dag=self.dag,
        )
        ti = TaskInstance(op, DEFAULT_DATE)
        ti.render_templates()

        # pylint: disable=unsubscriptable-object
        self.assertEqual(dag_id, getattr(op, 'filter')['job_names'][0])
        # pylint: enable=unsubscriptable-object

        self.assertEqual(dag_id, getattr(op, 'gcp_conn_id')) 
Example #22
Source File: test_cloud_build.py    From airflow with Apache License 2.0
def test_load_templated_yaml(self):
        dag = DAG(dag_id='example_cloudbuild_operator', start_date=TEST_DEFAULT_DATE)
        with tempfile.NamedTemporaryFile(suffix='.yaml', mode='w+t') as build:
            build.writelines("""
            steps:
                - name: 'ubuntu'
                  args: ['echo', 'Hello {{ params.name }}!']
            """)
            build.seek(0)
            body_path = build.name
            operator = CloudBuildCreateBuildOperator(
                body=body_path,
                task_id="task-id", dag=dag,
                params={'name': 'airflow'}
            )
            operator.prepare_template()
            ti = TaskInstance(operator, TEST_DEFAULT_DATE)
            ti.render_templates()
            expected_body = {'steps': [
                {'name': 'ubuntu',
                 'args': ['echo', 'Hello airflow!']
                 }
            ]
            }
            self.assertEqual(expected_body, operator.body) 
Example #23
Source File: test_cloud_storage_transfer_service.py    From airflow with Apache License 2.0
def test_templates(self, _):
        dag_id = 'test_dag_id'
        # pylint: disable=attribute-defined-outside-init
        self.dag = DAG(dag_id, default_args={'start_date': DEFAULT_DATE})
        op = CloudDataTransferServiceCreateJobOperator(
            body={"description": "{{ dag.dag_id }}"},
            gcp_conn_id='{{ dag.dag_id }}',
            aws_conn_id='{{ dag.dag_id }}',
            task_id='task-id',
            dag=self.dag,
        )
        ti = TaskInstance(op, DEFAULT_DATE)
        ti.render_templates()
        self.assertEqual(dag_id, getattr(op, 'body')[DESCRIPTION])
        self.assertEqual(dag_id, getattr(op, 'gcp_conn_id'))
        self.assertEqual(dag_id, getattr(op, 'aws_conn_id')) 
Example #24
Source File: test_cloud_storage_transfer_service.py    From airflow with Apache License 2.0
def test_job_delete_with_templates(self, _):
        dag_id = 'test_dag_id'
        args = {'start_date': DEFAULT_DATE}
        self.dag = DAG(dag_id, default_args=args)  # pylint: disable=attribute-defined-outside-init
        op = CloudDataTransferServiceDeleteJobOperator(
            job_name='{{ dag.dag_id }}',
            gcp_conn_id='{{ dag.dag_id }}',
            api_version='{{ dag.dag_id }}',
            task_id=TASK_ID,
            dag=self.dag,
        )
        ti = TaskInstance(op, DEFAULT_DATE)
        ti.render_templates()
        self.assertEqual(dag_id, getattr(op, 'job_name'))
        self.assertEqual(dag_id, getattr(op, 'gcp_conn_id'))
        self.assertEqual(dag_id, getattr(op, 'api_version')) 
Example #25
Source File: test_baseoperator.py    From airflow with Apache License 2.0
def test_nested_template_fields_declared_must_exist(self):
        """Test render_template when a nested template field is missing."""
        with DAG("test-dag", start_date=DEFAULT_DATE):
            task = DummyOperator(task_id="op1")

        with self.assertRaises(AttributeError) as e:
            task.render_template(ClassWithCustomAttributes(template_fields=["missing_field"]), {})

        self.assertEqual("'ClassWithCustomAttributes' object has no attribute 'missing_field'",
                         str(e.exception)) 
Example #26
Source File: test_baseoperator.py    From airflow with Apache License 2.0
def test_chain(self):
        dag = DAG(dag_id='test_chain', start_date=datetime.now())
        [op1, op2, op3, op4, op5, op6] = [
            DummyOperator(task_id='t{i}'.format(i=i), dag=dag)
            for i in range(1, 7)
        ]
        # chain() links its arguments pairwise: op1 fans out to [op2, op3],
        # the two equal-length lists link element-wise (op2 >> op4, op3 >> op5),
        # and [op4, op5] fan in to op6.
        chain(op1, [op2, op3], [op4, op5], op6)

        self.assertCountEqual([op2, op3], op1.get_direct_relatives(upstream=False))
        self.assertEqual([op4], op2.get_direct_relatives(upstream=False))
        self.assertEqual([op5], op3.get_direct_relatives(upstream=False))
        self.assertCountEqual([op4, op5], op6.get_direct_relatives(upstream=True)) 
Example #27
Source File: test_baseoperator.py    From airflow with Apache License 2.0
def test_jinja_env_creation(self, mock_jinja_env):
        """Verify if a Jinja environment is created only once when templating."""
        with DAG("test-dag", start_date=DEFAULT_DATE):
            task = MockOperator(task_id="op1", arg1="{{ foo }}", arg2="{{ bar }}")

        task.render_template_fields(context={"foo": "whatever", "bar": "whatever"})
        self.assertEqual(mock_jinja_env.call_count, 1) 
Example #28
Source File: test_baseoperator.py    From airflow with Apache License 2.0
def test_chain_different_length_iterable(self):
        dag = DAG(dag_id='test_chain', start_date=datetime.now())
        [op1, op2, op3, op4, op5] = [DummyOperator(task_id='t{i}'.format(i=i), dag=dag) for i in range(1, 6)]
        with self.assertRaises(AirflowException):
            chain([op1, op2], [op3, op4, op5]) 
Example #29
Source File: test_dag.py    From airflow with Apache License 2.0
def test_dag_task_invalid_weight_rule(self):
        # Test if we enter an invalid weight rule
        with DAG('dag', start_date=DEFAULT_DATE, default_args={'owner': 'owner1'}):
            with self.assertRaises(AirflowException):
                DummyOperator(task_id='should_fail', weight_rule='no rule') 
Example #30
Source File: test_baseoperator.py    From airflow with Apache License 2.0
def test_set_jinja_env_additional_option(self):
        """Test render_template given various input types."""
        with DAG("test-dag",
                 start_date=DEFAULT_DATE,
                 jinja_environment_kwargs={'keep_trailing_newline': True}):
            task = DummyOperator(task_id="op1")

        result = task.render_template("{{ foo }}\n\n", {"foo": "bar"})
        self.assertEqual(result, "bar\n\n")