In Airflow I'm trying to use a Jinja template, but it is not getting parsed and is instead treated as a plain string. Please see my code:
```
from datetime import datetime
from airflow.operators.python_operator import PythonOperator
from airflow.models import DAG


def test_method(dag, network_id, schema_name):
    print "Schema_name in test_method", schema_name
    third_task = PythonOperator(
        task_id='first_task_' + network_id,
        provide_context=True,
        python_callable=print_context2,
        dag=dag)
    return third_task


dag = DAG('testing_xcoms_pull', description='Testing Xcoms',
          schedule_interval='0 12 * * *',
          start_date=datetime.today(),
          catchup=False)


def print_context(ds, **kwargs):
    return 'Returning from print_context'


def print_context2(ds, **kwargs):
    return 'Returning from print_context2'


def get_schema(ds, **kwargs):
    # Returning schema name based on network_id
    schema_name = "my_schema"
    return get_schema


first_task = PythonOperator(
    task_id='first_task',
    provide_context=True,
    python_callable=print_context,
    dag=dag)

second_task = PythonOperator(
    task_id='second_task',
    provide_context=True,
    python_callable=get_schema,
    dag=dag)

network_id = '{{ dag_run.conf["network_id"] }}'

first_task >> second_task >> test_method(
    dag=dag,
    network_id=network_id,
    schema_name='{{ ti.xcom_pull("second_task") }}')
```
The DAG creation is failing because '{{ dag_run.conf["network_id"] }}' is treated as a plain string by Airflow. Can anyone help me find the problem in my code?
Airflow operators have a variable called template_fields. This variable is usually declared at the top of the operator class; check out any of the operators in the GitHub code base.
If the field you are trying to pass Jinja template syntax into is not in that template_fields list, the Jinja syntax will simply appear as a string.
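A quick way to check is to print that attribute for the operator class you are using; a small sketch (the exact tuple contents vary across Airflow versions):

```python
from airflow.operators.python_operator import PythonOperator

# Only the fields listed here are run through Jinja by Airflow.
print(PythonOperator.template_fields)
```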
A DAG object, and its definition code, isn't parsed within the context of an execution; it's parsed with regards to the environment available to it when loaded by Python.
The network_id variable, which you use to define the task_id in your function, isn't templated prior to execution; it can't be, since there is no active execution yet. Even with templating, you still need a valid, static, non-templated task_id value at the time the DAG object is instantiated.
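If what you actually need is the network_id value inside the task at run time (rather than in the task_id itself), one option that sidesteps templating is to read it from the dag_run object in the callable; a minimal sketch of the question's print_context2, assuming provide_context=True as in the question:

```python
def print_context2(ds, **kwargs):
    # dag_run.conf only exists while a run is executing, which is also
    # why it cannot be used to build a task_id at parse time.
    network_id = kwargs["dag_run"].conf.get("network_id")
    print("network_id at runtime:", network_id)
    return 'Returning from print_context2'
```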
Related
Running Airflow 2.2.2
I would like to parametrize the http_conn_id using the DAG input parameters as such:
with DAG(params={'api': 'my-api-id'}) as dag:
    post_op = SimpleHttpOperator(
        task_id='post_op',
        endpoint='custom-end-point',
        http_conn_id='{{ params.api }}',  # <- this doesn't get filled correctly
        dag=dag)
Where my-api-id is set in the Airflow Connections.
However, when executing, the operator evaluates http_conn_id as '{{ params.api }}'.
I'm suspecting this is not possible - or is an anti-pattern?
Airflow operators do not render all the fields; they render only the fields listed in the attribute template_fields. For the operator SimpleHttpOperator, you have only these fields:
template_fields: Sequence[str] = (
    'endpoint',
    'data',
    'headers',
)
To get around the problem, you can create a new class which extends the official operator and just adds the extra fields you want to render:
from datetime import datetime

from airflow import DAG
from airflow.providers.http.operators.http import SimpleHttpOperator


class MyHttpOperator(SimpleHttpOperator):
    template_fields = (
        *SimpleHttpOperator.template_fields,
        'http_conn_id',
    )


with DAG(
    dag_id='http_dag',
    start_date=datetime.today(),
    params={'api': 'my-api-id'}
) as dag:
    post_op = MyHttpOperator(
        task_id='post_op',
        endpoint='custom-end-point',
        http_conn_id='{{ params.api }}',
        dag=dag
    )
I am trying to use airflow variables to determine whether to execute a task or not. I have tried this and it's not working:
if '{{ params.year }}' == '{{ params.message }}':
    run_this = DummyOperator(
        task_id='dummy_dag'
    )
I was hoping to get some help making it work. Also is there a better way of doing something like this in airflow?
I think a good way to solve this is with BranchPythonOperator, branching dynamically based on the provided DAG parameters. Consider this example:
Use params to provide the parameters to the DAG (this could also be done from the UI), in this example: {"enabled": True}
from airflow.decorators import dag, task
from airflow.utils.dates import days_ago
from airflow.operators.python import get_current_context, BranchPythonOperator

default_args = {"owner": "airflow"}  # minimal default_args so the snippet is self-contained


@dag(
    default_args=default_args,
    schedule_interval=None,
    start_date=days_ago(1),
    catchup=False,
    tags=["example"],
    params={"enabled": True},
)
def branch_from_dag_params():
    def _print_enabled():
        context = get_current_context()
        enabled = context["params"].get("enabled", False)
        print(f"Task id: {context['ti'].task_id}")
        print(f"Enabled is: {enabled}")

    @task
    def task_a():
        _print_enabled()

    @task
    def task_b():
        _print_enabled()
Define a callable for the BranchPythonOperator in which you perform your conditionals and return the next task to be executed. You can access the execution context variables from **kwargs. Also keep in mind that this operator should return a single task_id or a list of task_ids to follow downstream; those resultant tasks should always be directly downstream from it.
    def _get_task_run(ti, **kwargs):
        custom_param = kwargs["params"].get("enabled", False)
        if custom_param:
            return "task_a"
        else:
            return "task_b"

    branch_task = BranchPythonOperator(
        task_id="branch_task",
        python_callable=_get_task_run,
    )

    task_a_exec = task_a()
    task_b_exec = task_b()

    branch_task >> [task_a_exec, task_b_exec]


dag = branch_from_dag_params()
The result is that task_a gets executed and task_b is skipped:
AIRFLOW_CTX_DAG_OWNER=airflow
AIRFLOW_CTX_DAG_ID=branch_from_dag_params
AIRFLOW_CTX_TASK_ID=task_a
Task id: task_a
Enabled is: True
Let me know if that worked for you.
I am working on some simple Apache Airflow DAG. My goal is to:
1. calculate the date parameter based on the DAG run date - I try to achieve that with the Python operator.
2. pass the parameter calculated above as a bq query parameter.
Any ideas are welcome.
My code is below - I have marked the two points I am struggling with using a 'TODO' label.
...

def set_date_param(dag_run_time):
    # a business logic applied here
    ...
    return "2020-05-28"  # example result

# --------------------------------------------------------
# DAG definition below
# --------------------------------------------------------

# Python operator
set_data_param = PythonOperator(
    task_id='set_data_param',
    python_callable=set_data_param,
    provide_context=True,
    op_kwargs={
        "dag_run_date":  # TODO - how to pass the DAG running date as a function input parameter
    },
    dag=dag
)

# bq operator
load_data_to_bq_table = BigQueryOperator(
    task_id='load_data_to_bq_table',
    sql="""SELECT ccustomer_id, sales
           FROM `my_project.dataset1.table1`
           WHERE date_key = {date_key_param}
        """.format(
        date_key_param=
    ),  # TODO - how to get the python operator results from the previous step
    use_legacy_sql=False,
    destination_dataset_table="my_project.dataset2.table2",
    trigger_rule='all_success',
    dag=dag
)

set_data_param >> load_data_to_bq_table
For PythonOperator to pass the execution date to the python_callable, you only need to set provide_context=True (as has already been done in your example). This way, Airflow automatically passes a collection of keyword arguments to the python callable, such that the names and values of these arguments are equivalent to the template variables described here. That is, if you define the python callable as set_data_param(ds, **kwargs): ..., the ds parameter will automatically get the execution date as a string value in the format YYYY-MM-DD.
XCOM allows task instances to exchange messages. To use the date returned by set_date_param() inside the sql query string of BigQueryOperator, you can combine XCOM with Jinja templating:
sql="""SELECT ccustomer_id, sales
FROM `my_project.dataset1.table1`
WHERE date_key = {{ task_instance.xcom_pull(task_ids='set_data_param') }}
"""
The following complete example puts all pieces together. In the example, the get_date task creates a date string based on the execution date. After that, the use_date task uses XCOM and Jinja templating to retrieve the date string and writes it to a log.
import logging

from airflow import DAG
from airflow.operators.python_operator import PythonOperator
from airflow.utils.dates import days_ago

default_args = {'start_date': days_ago(1)}


def calculate_date(ds, execution_date, **kwargs):
    return f'{ds} ({execution_date.strftime("%m/%d/%Y")})'


def log_date(date_string):
    logging.info(date_string)


with DAG(
    'a_dag',
    schedule_interval='*/5 * * * *',
    default_args=default_args,
    catchup=False,
) as dag:
    get_date = PythonOperator(
        task_id='get_date',
        python_callable=calculate_date,
        provide_context=True,
    )
    use_date = PythonOperator(
        task_id='use_date',
        python_callable=log_date,
        op_args=['Date: {{ task_instance.xcom_pull(task_ids="get_date") }}'],
    )
    get_date >> use_date
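Applied to the BigQuery step from the question, the same Jinja expression can go straight into the sql string, since sql is one of the templated fields of the BigQuery operator; a sketch (the import path and the quoting of date_key are assumptions that depend on your Airflow/provider version and schema):

```python
from airflow.contrib.operators.bigquery_operator import BigQueryOperator

load_data_to_bq_table = BigQueryOperator(
    task_id='load_data_to_bq_table',
    # xcom_pull fetches the value returned by the set_data_param task at runtime
    sql="""SELECT ccustomer_id, sales
           FROM `my_project.dataset1.table1`
           WHERE date_key = '{{ task_instance.xcom_pull(task_ids="set_data_param") }}'
        """,
    use_legacy_sql=False,
    destination_dataset_table="my_project.dataset2.table2",
    trigger_rule='all_success',
    dag=dag,
)
```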
I'm trying to use the Airflow macros in my Python Operator but I keep receiving "airflow: error: unrecognized arguments:"
So I am importing a function that has 3 positional arguments: (sys.argv,start_date,end_date) and I am hoping to make the start_date and end_date the execution date in Airflow.
The function arguments look something like this
def main(argv,start_date,end_date):
Here is the task I have in the DAG:
t1 = PythonOperator(
    task_id='Pull_DCM_Report',
    provide_context=True,
    python_callable=main,
    op_args=[sys.argv, '{{ ds }}', '{{ ds }}'],
    dag=dag)
Since you're passing in dates that need to be rendered by Airflow, you'll want to use the templates_dict parameter of the PythonOperator. This field is the only one that Airflow will recognize as containing templates.
You can create a custom Python operator that recognizes more fields as templates by copying the existing operator and adding the relevant fields to the template_fields tuple.
def main(**kwargs):
    argv = kwargs.get('templates_dict').get('argv')
    start_date = kwargs.get('templates_dict').get('start_date')
    end_date = kwargs.get('templates_dict').get('end_date')


t1 = PythonOperator(task_id='Pull_DCM_Report',
                    provide_context=True,
                    python_callable=main,
                    templates_dict={'argv': sys.argv,
                                    'start_date': '{{ yesterday_ds }}',
                                    'end_date': '{{ ds }}'},
                    dag=dag)
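Alternatively, following the subclassing idea mentioned above, here is a minimal sketch of a PythonOperator variant that also renders op_args, so the original main(argv, start_date, end_date) signature can be kept (the class name is illustrative, and newer Airflow versions already template op_args out of the box):

```python
from airflow.operators.python_operator import PythonOperator

class TemplatedArgsPythonOperator(PythonOperator):
    # extend whatever the base class already templates with op_args
    template_fields = (*PythonOperator.template_fields, 'op_args')

t1 = TemplatedArgsPythonOperator(
    task_id='Pull_DCM_Report',
    provide_context=True,
    python_callable=main,
    op_args=[sys.argv, '{{ yesterday_ds }}', '{{ ds }}'],
    dag=dag)
```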
You can "wrap" the call to the main function with the following:
t1 = PythonOperator(
    task_id='Pull_DCM_Report',
    provide_context=True,
    python_callable=lambda **context: main([], context["ds"], context["ds"]),
    dag=dag)
If lambdas aren't your cup of tea, you could define a wrapper function, call that, and have it call out to main.
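For example, a named wrapper might look like this (a sketch; run_main is a made-up name):

```python
def run_main(**context):
    # forward the rendered execution date to main's positional parameters
    return main([], context["ds"], context["ds"])

t1 = PythonOperator(
    task_id='Pull_DCM_Report',
    provide_context=True,
    python_callable=run_main,
    dag=dag)
```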
Is there any way to make a user-defined macro in Airflow which is itself computed from other macros?
from airflow import DAG
from airflow.operators.bash_operator import BashOperator

dag = DAG(
    'simple',
    schedule_interval='0 21 * * *',
    user_defined_macros={
        'next_execution_date': '{{ dag.following_schedule(execution_date) }}',
    },
)

task = BashOperator(
    task_id='bash_op',
    bash_command='echo "{{ next_execution_date }}"',
    dag=dag,
)
The use case here is to back-port the new Airflow v1.8 next_execution_date macro to work in Airflow v1.7. Unfortunately, this template is rendered without macro expansion:
$ airflow render simple bash_op 2017-08-09 21:00:00
# ----------------------------------------------------------
# property: bash_command
# ----------------------------------------------------------
echo "{{ dag.following_schedule(execution_date) }}"
Here are some solutions:
1. Override BashOperator to add some values to the context
class NextExecutionDateAwareBashOperator(BashOperator):
    def render_template(self, attr, content, context):
        dag = context['dag']
        execution_date = context['execution_date']
        context['next_execution_date'] = dag.following_schedule(execution_date)
        return super().render_template(attr, content, context)
        # or in python 2:
        # return super(NextExecutionDateAwareBashOperator, self).render_template(attr, content, context)
The good part with this approach: you can capture some repeated code in your custom operator.
The bad part: you have to write a custom operator to add values to the context, before templated fields are rendered.
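With that subclass in place, usage is a drop-in replacement for BashOperator, and the new context key is available to the template; a sketch:

```python
task = NextExecutionDateAwareBashOperator(
    task_id='bash_op',
    bash_command='echo "{{ next_execution_date }}"',
    dag=dag,
)
```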
2. Do your computation in a user defined macro
Macros are not necessarily values. They can be functions.
In your DAG:
def compute_next_execution_date(dag, execution_date):
    return dag.following_schedule(execution_date)


dag = DAG(
    'simple',
    schedule_interval='0 21 * * *',
    user_defined_macros={
        'next_execution_date': compute_next_execution_date,
    },
)

task = BashOperator(
    task_id='bash_op',
    bash_command='echo "{{ next_execution_date(dag, execution_date) }}"',
    dag=dag,
)
The good part: you can define reusable functions to process values available at runtime (XCom values, job instance properties, task instance properties, etc...), and make your function result available to render a template.
The bad part (but not that annoying): you have to import such a function as a user defined macro in every dag where needed.
3. Call your statement directly in your template
This solution is the simplest (as mentioned in Ardan's answer), and probably the best one in your case.
BashOperator(
    task_id='bash_op',
    bash_command='echo "{{ dag.following_schedule(execution_date) }}"',
    dag=dag,
)
Ideal for simple calls like this one. And there are some other objects directly available as macros (like task, task_instance, etc.); even some standard modules are available (like macros.time, ...).
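For instance, the built-in macros module can be called directly in a template without any setup; a small sketch:

```python
BashOperator(
    task_id='bash_builtin_macro',
    # macros.ds_add shifts the execution date string by the given number of days
    bash_command='echo "{{ macros.ds_add(ds, 7) }}"',
    dag=dag,
)
```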
I would vote for making an Airflow plugin to inject your pre-defined macros.
Using this method, you can use your pre-defined macro in any operator without declaring anything.
Below are some custom macros that we're using.
Example using: {{ macros.dagtz_next_execution_date(ti) }}
from airflow.plugins_manager import AirflowPlugin
from datetime import datetime, timedelta
from airflow.utils.db import provide_session
from airflow.models import DagRun
import pendulum


@provide_session
def _get_dag_run(ti, session=None):
    """Get DagRun obj of the TaskInstance ti

    Args:
        ti (TYPE): the TaskInstance object
        session (None, optional): Not in use

    Returns:
        DagRun obj: the DagRun obj of the TaskInstance ti
    """
    task = ti.task
    dag_run = None
    if hasattr(task, 'dag'):
        dag_run = (
            session.query(DagRun)
            .filter_by(
                dag_id=task.dag.dag_id,
                execution_date=ti.execution_date)
            .first()
        )
        session.expunge_all()
        session.commit()
    return dag_run


def ds_add_no_dash(ds, days):
    """
    Add or subtract days from a YYYYMMDD

    :param ds: anchor date in ``YYYYMMDD`` format to add to
    :type ds: str
    :param days: number of days to add to the ds, you can use negative values
    :type days: int

    >>> ds_add_no_dash('20150101', 5)
    '20150106'
    >>> ds_add_no_dash('20150106', -5)
    '20150101'
    """
    ds = datetime.strptime(ds, '%Y%m%d')
    if days:
        ds = ds + timedelta(days)
    return ds.isoformat()[:10].replace('-', '')


def dagtz_execution_date(ti):
    """get the TaskInstance execution date (in DAG timezone) in pendulum obj

    Args:
        ti (TaskInstance): the TaskInstance object

    Returns:
        pendulum obj: execution_date in pendulum object (in DAG tz)
    """
    execution_date_pdl = pendulum.instance(ti.execution_date)
    dagtz_execution_date_pdl = execution_date_pdl.in_timezone(ti.task.dag.timezone)
    return dagtz_execution_date_pdl


def dagtz_next_execution_date(ti):
    """get the TaskInstance next execution date (in DAG timezone) in pendulum obj

    Args:
        ti (TaskInstance): the TaskInstance object

    Returns:
        pendulum obj: next execution_date in pendulum object (in DAG tz)
    """
    # For manually triggered dagruns that aren't run on a schedule, next/previous
    # schedule dates don't make sense, and should be set to execution date for
    # consistency with how execution_date is set for manually triggered tasks, i.e.
    # triggered_date == execution_date.
    dag_run = _get_dag_run(ti)
    if dag_run and dag_run.external_trigger:
        next_execution_date = ti.execution_date
    else:
        next_execution_date = ti.task.dag.following_schedule(ti.execution_date)
    next_execution_date_pdl = pendulum.instance(next_execution_date)
    dagtz_next_execution_date_pdl = next_execution_date_pdl.in_timezone(ti.task.dag.timezone)
    return dagtz_next_execution_date_pdl


def dagtz_next_ds(ti):
    """get the TaskInstance next execution date (in DAG timezone) in YYYY-MM-DD string"""
    dagtz_next_execution_date_pdl = dagtz_next_execution_date(ti)
    return dagtz_next_execution_date_pdl.strftime('%Y-%m-%d')


def dagtz_next_ds_nodash(ti):
    """get the TaskInstance next execution date (in DAG timezone) in YYYYMMDD string"""
    dagtz_next_ds_str = dagtz_next_ds(ti)
    return dagtz_next_ds_str.replace('-', '')


def dagtz_prev_execution_date(ti):
    """get the TaskInstance previous execution date (in DAG timezone) in pendulum obj

    Args:
        ti (TaskInstance): the TaskInstance object

    Returns:
        pendulum obj: previous execution_date in pendulum object (in DAG tz)
    """
    # For manually triggered dagruns that aren't run on a schedule, next/previous
    # schedule dates don't make sense, and should be set to execution date for
    # consistency with how execution_date is set for manually triggered tasks, i.e.
    # triggered_date == execution_date.
    dag_run = _get_dag_run(ti)
    if dag_run and dag_run.external_trigger:
        prev_execution_date = ti.execution_date
    else:
        prev_execution_date = ti.task.dag.previous_schedule(ti.execution_date)
    prev_execution_date_pdl = pendulum.instance(prev_execution_date)
    dagtz_prev_execution_date_pdl = prev_execution_date_pdl.in_timezone(ti.task.dag.timezone)
    return dagtz_prev_execution_date_pdl


def dagtz_prev_ds(ti):
    """get the TaskInstance prev execution date (in DAG timezone) in YYYY-MM-DD string"""
    dagtz_prev_execution_date_pdl = dagtz_prev_execution_date(ti)
    return dagtz_prev_execution_date_pdl.strftime('%Y-%m-%d')


def dagtz_prev_ds_nodash(ti):
    """get the TaskInstance prev execution date (in DAG timezone) in YYYYMMDD string"""
    dagtz_prev_ds_str = dagtz_prev_ds(ti)
    return dagtz_prev_ds_str.replace('-', '')


# Defining the plugin class
class AirflowTestPlugin(AirflowPlugin):
    name = "custom_macros"
    macros = [dagtz_execution_date, ds_add_no_dash,
              dagtz_next_execution_date, dagtz_next_ds, dagtz_next_ds_nodash,
              dagtz_prev_execution_date, dagtz_prev_ds, dagtz_prev_ds_nodash]
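Once the plugin is loaded, these macros can be used in any templated field just like the built-in ones; a sketch of calling one from a BashOperator:

```python
BashOperator(
    task_id='echo_next_ds',
    # plugin macros are exposed under the macros namespace in templates
    bash_command='echo "next ds: {{ macros.dagtz_next_ds(ti) }}"',
    dag=dag,
)
```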
user_defined_macros are not processed as templates by default. If you want to keep a template in a user_defined_macro (or if you use a template in a params variable), you can always re-run the templating function manually:
class DoubleTemplatedBashOperator(BashOperator):
    def pre_execute(self, context):
        context['ti'].render_templates()
And this will work for templates that don't also reference other parameters or UDMs. This way, you can have "two-deep" templates.
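A sketch of how this could be used with the DAG from the question, whose next_execution_date UDM is itself a template string:

```python
task = DoubleTemplatedBashOperator(
    task_id='bash_op',
    # the first render produces the UDM's template string; pre_execute renders it again
    bash_command='echo "{{ next_execution_date }}"',
    dag=dag,
)
```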
Or put your UDM directly in the BashOperator's command instead (the easiest solution):
BashOperator(
    task_id='bash_op',
    bash_command='echo "{{ dag.following_schedule(execution_date) }}"',
    dag=dag,
)
None of these worked for me, so here's what I did: I used user_defined_macros, but I pass all template variables to my macro and then use Jinja to render the result.
import logging

from airflow import DAG
from airflow.utils.dates import days_ago
from jinja2 import Template

# note: the isinstance checks below use `unicode`, so this snippet assumes Python 2
MACRO_CONFIG = 'config({"data_interval_start": data_interval_start, "data_interval_end": data_interval_end, "ds": ds, "ds_nodash": ds_nodash, "ts": ts, "ts_nodash_with_tz": ts_nodash_with_tz, "ts_nodash": ts_nodash, "prev_data_interval_start_success": prev_data_interval_start_success, "prev_data_interval_end_success": prev_data_interval_end_success, "dag": dag, "task": task, "macros": macros, "task_instance": task_instance, "ti": ti, "params": params, "conn": conn, "task_instance_key_str": task_instance_key_str, "conf": conf, "run_id": run_id, "dag_run": dag_run, "test_mode": test_mode})'


def config_macro(context):
    return FunctionThatReturnsTemplates(context)


with DAG(
    'my-dag-id',
    schedule_interval=None,
    start_date=days_ago(1),
    user_defined_macros={'config': config_macro}
) as dag:
    ...


def config_macro_template(attr_name):
    return '{{' + MACRO_CONFIG + '.' + attr_name + '}}'


class FunctionThatReturnsTemplates(object):
    def __getattribute__(self, name):
        attr = object.__getattribute__(self, name)
        logging.info('attr')
        logging.info(attr)
        logging.info("type(attr)")
        logging.info(type(attr))
        if callable(attr):
            logging.info('method attr')

            def render_result(*args, **kwargs):
                logging.info('before calling %s' % attr.__name__)
                result = attr(*args, **kwargs)
                logging.info('done calling %s' % attr.__name__)
                return Template(result).render(**self.context) if isinstance(result, str) or isinstance(result, unicode) else result

            return render_result
        logging.info('attr is not method')
        if isinstance(attr, str) or isinstance(attr, unicode):
            logging.info('attr is string or unicode')
            result = Template(attr).render(**self.context)
            logging.info(result)
            logging.info("result")
            return result
        return attr

    def __init__(self, context):
        logging.info('from sampling pipeline context')
        logging.info(context)
        self.context = context


...

my_task = SomeOperator(
    templated_field=config_macro_template('function(args)'),
    task_id='my-task-id'
)