Python airflow.operators.bash_operator.BashOperator() Examples

The following are 11 code examples of airflow.operators.bash_operator.BashOperator(), collected from open-source projects. The source file, originating project, and license are noted above each example.
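Before the examples, here is a minimal, self-contained sketch of typical usage. The dag_id, schedule, and command below are illustrative placeholders; the import path is the Airflow 1.x location of the operator.

import datetime

from airflow import DAG
from airflow.operators.bash_operator import BashOperator

# illustrative DAG; every name and value here is a placeholder
dag = DAG(
    dag_id='example_bash_dag',
    start_date=datetime.datetime(2020, 1, 1),
    schedule_interval=None,
)

hello = BashOperator(
    task_id='hello',
    bash_command='echo "hello from bash"',
    dag=dag,
)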
Example #1
Source File: airflowfile.py    From data-testing-with-airflow with Apache License 2.0
def send_slack_alert(context=None):
    """Send slack alert on failure to alert the team"""
    payload_vars = {
        'url': 'your_slack_hook_url_here',
        'run_id': str(context['run_id']),
        'task': str(context['task']),
        'dag_name': str(context['dag'].dag_id)
    }

    error_message = "{dag_name} Failure! Task failed: {task} Check log at: {run_id}".format(**payload_vars)
    payload_vars['json'] = """payload={{"channel":"ChuckNorris","text":"{0}"}}""".format(error_message)

    slack_cmd = """curl -x proxy:port \
    -X POST \
    --data-urlencode '{json}' \
    {url}""".format(**payload_vars)

    slack_alert = BashOperator(
        task_id='slack_alert',
        dag=dag,
        bash_command=slack_cmd,
    )
    slack_alert.execute(context) 
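Rather than registering the operator as a DAG task, this function builds and executes it inside a failure callback. A sketch of how such a callback is typically attached, assuming Airflow's standard on_failure_callback hook (the other default_args entries are placeholders):

default_args = {
    'owner': 'airflow',  # placeholder
    # Airflow invokes the callback with the failing task's context dict
    'on_failure_callback': send_slack_alert,
}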
Example #2
Source File: common_step_factory.py    From shipyard with Apache License 2.0
def get_upgrade_airflow(self, task_id=dn.UPGRADE_AIRFLOW):
    """Generate the upgrade_airflow step

    Step responsible for upgrading the airflow worker. The step
    executes the upgrade script in the background and directs its
    output to /dev/null so that 'nohup.out' is not created. This
    is intentional, so that the airflow worker upgrade only starts
    after the 'update_site' workflow completes; it keeps the
    airflow worker available during update/upgrade and prevents
    any disruption to the workflow. Note that dag_id and execution
    date are required for proper execution of the script.
    """
    return BashOperator(task_id=task_id,
                        bash_command=(
                            "nohup "
                            "/usr/local/airflow/upgrade_airflow_worker.sh "
                            "{{ ti.dag_id }} {{ ti.execution_date }} "
                            ">/dev/null 2>&1 &"),
                        dag=self.dag)
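Because bash_command is one of BashOperator's templated fields, the {{ ti.dag_id }} and {{ ti.execution_date }} placeholders are rendered by Jinja from the running task instance; that is how the upgrade script receives the dag_id and execution date the docstring says it requires.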
Example #3
Source File: sync_commoncrawl_workflow.py    From cccatalog with MIT License
def get_runner_operator(dag):
    return BashOperator(task_id="sync_commoncrawl_workflow",
                        bash_command=f"python {airflowHome}/dags/"
                        "commoncrawl_s3_syncer/SyncImageProviders.py",
                        dag=dag) 
Example #4
Source File: operator_util.py    From cccatalog with MIT License
def get_runner_operator(dag, source, script_location):
    return BashOperator(
        task_id=f'get_{source}_images',
        bash_command=f'python {script_location} --mode default',
        dag=dag
    ) 
Example #5
Source File: operator_util.py    From cccatalog with MIT License
def get_log_operator(dag, source, status):
    return BashOperator(
        task_id=f'{source}_{status}',
        bash_command=f'echo {status} {source} workflow at $(date)',
        dag=dag
    ) 
Example #6
Source File: test_tags.py    From dagster with Apache License 2.0
def get_dag():
    dag = DAG(dag_id='dag', default_args=default_args, schedule_interval=None)

    templated_command = '''
    echo '{{ ds }}'
    '''

    # pylint: disable=unused-variable
    t1 = BashOperator(
        task_id='templated', depends_on_past=False, bash_command=templated_command, dag=dag,
    )

    return dag 
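Here {{ ds }} is the run's logical date in YYYY-MM-DD form, supplied by Airflow's default Jinja context, so the templated command simply echoes the date of the DAG run that the test constructs.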
Example #7
Source File: dataproc.py    From telemetry-airflow with Mozilla Public License 2.0
def copy_artifacts_dev(dag, project_id, artifact_bucket, storage_bucket):
    """Bootstrap a dataproc job for local testing.

    This job requires setting GOOGLE_APPLICATION_CREDENTIALS before starting the
    airflow container. It will copy the contents of the local jobs and
    dataproc_boostrap folders to the artifacts bucket, and create a scratch
    storage bucket for dataproc.

    :dag DAG: The dag to register the job
    :project_id str: The project id, necessary for setting the default project
    :artifact_bucket str: The bucket for storing bootstrap artifacts
    :storage_bucket str: The scratch bucket for dataproc
    """
    return BashOperator(
        task_id="copy_to_dev_artifacts",
        bash_command="""
        gcloud auth activate-service-account --key-file ~/.credentials || cat ~/.credentials
        gcloud config set project ${PROJECT_ID}

        gsutil mb gs://${ARTIFACT_BUCKET}
        gsutil mb gs://${STORAGE_BUCKET}

        gsutil -m cp -r ~/dataproc_bootstrap gs://${ARTIFACT_BUCKET}
        gsutil -m cp -r ~/jobs gs://${ARTIFACT_BUCKET}

        echo "listing artifacts..."
        gsutil ls -r gs://${ARTIFACT_BUCKET}
        """,
        env={
            # https://github.com/GoogleCloudPlatform/gsutil/issues/236
            "CLOUDSDK_PYTHON": "python",
            "PROJECT_ID": project_id,
            "ARTIFACT_BUCKET": artifact_bucket,
            "STORAGE_BUCKET": storage_bucket,
        },
        dag=dag,
    )


# parameters that can be used to reconfigure a dataproc job for dev testing 
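One detail worth noting about the env parameter: when it is provided, BashOperator runs the command with exactly that mapping instead of inheriting the parent process environment, which is why PROJECT_ID and both bucket names must be passed explicitly alongside the CLOUDSDK_PYTHON workaround.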
Example #8
Source File: common_step_factory.py    From shipyard with Apache License 2.0
def get_skip_upgrade_airflow(self, task_id=dn.SKIP_UPGRADE_AIRFLOW):
    """Generate the skip_upgrade_airflow step

    Step will print a message stating that we do not need to
    upgrade the airflow worker
    """
    return BashOperator(task_id=task_id,
                        bash_command=(
                            "echo 'Airflow Worker Upgrade Not Required'"),
                        dag=self.dag)
Example #9
Source File: test_dagbuilder.py    From dag-factory with MIT License
def test_get_dag_params():
    td = dagbuilder.DagBuilder("test_dag", DAG_CONFIG, DEFAULT_CONFIG)
    expected = {
        "dag_id": "test_dag",
        "default_args": {
            "owner": "custom_owner",
            "start_date": datetime.datetime(2018, 3, 1, 0, 0, tzinfo=UTC),
            "end_date": datetime.datetime(2018, 3, 5, 0, 0, tzinfo=UTC),
            "retries": 1,
            "retry_delay": datetime.timedelta(seconds=300),
        },
        "description": "this is an example dag",
        "schedule_interval": "0 3 * * *",
        "concurrency": 1,
        "max_active_runs": 1,
        "dagrun_timeout": datetime.timedelta(seconds=600),
        "tags": ["tag1", "tag2"],
        "tasks": {
            "task_1": {
                "operator": "airflow.operators.bash_operator.BashOperator",
                "bash_command": "echo 1",
                "execution_timeout_secs": 5
            },
            "task_2": {
                "operator": "airflow.operators.bash_operator.BashOperator",
                "bash_command": "echo 2",
                "dependencies": ["task_1"],
            },
            "task_3": {
                "operator": "airflow.operators.bash_operator.BashOperator",
                "bash_command": "echo 3",
                "dependencies": ["task_1"],
            },
        },
    }
    actual = td.get_dag_params()
    assert actual == expected 
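The expected dict shows DagBuilder normalizing raw config: date strings become timezone-aware datetimes and second counts become timedeltas. A hand-written approximation of what the underlying DAG_CONFIG might contain, where key names such as retry_delay_sec are assumptions rather than the project's actual fixture:

# hypothetical sketch of the raw config that dag-factory would normalize
DAG_CONFIG = {
    "default_args": {
        "owner": "custom_owner",
        "start_date": "2018-03-01",  # assumed string, parsed to a tz-aware datetime
        "end_date": "2018-03-05",
        "retries": 1,
        "retry_delay_sec": 300,      # assumed key, converted to timedelta(seconds=300)
    },
    "description": "this is an example dag",
    "schedule_interval": "0 3 * * *",
    "tasks": {
        "task_1": {
            "operator": "airflow.operators.bash_operator.BashOperator",
            "bash_command": "echo 1",
        },
    },
}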
Example #10
Source File: test_dagbuilder.py    From dag-factory with MIT License
def test_make_task_valid():
    td = dagbuilder.DagBuilder("test_dag", DAG_CONFIG, DEFAULT_CONFIG)
    operator = "airflow.operators.bash_operator.BashOperator"
    task_params = {"task_id": "test_task", "bash_command": "echo 1","execution_timeout_secs":5}
    actual = td.make_task(operator, task_params)
    assert actual.task_id == "test_task"
    assert actual.bash_command == "echo 1"
    assert isinstance(actual, BashOperator) 
Example #11
Source File: test_dagbuilder.py    From dag-factory with MIT License
def test_make_task_missing_required_param():
    td = dagbuilder.DagBuilder("test_dag", DAG_CONFIG, DEFAULT_CONFIG)
    operator = "airflow.operators.bash_operator.BashOperator"
    task_params = {"task_id": "test_task"}
    with pytest.raises(Exception):
        td.make_task(operator, task_params)
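The broad pytest.raises(Exception) works because bash_command is a required argument of BashOperator: with only task_id in task_params, the operator cannot be instantiated, and the failure propagates out of make_task.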