airflow_pydantic.TaskArgs¶
- pydantic model airflow_pydantic.TaskArgs[source]¶
Bases:
BaseModel
Show JSON schema
{ "title": "TaskArgs", "type": "object", "properties": { "owner": { "anyOf": [ { "type": "string" }, { "type": "null" } ], "default": null, "description": "the owner of the task. Using a meaningful description (e.g. user/person/team/role name) to clarify ownership is recommended.", "title": "Owner" }, "email": { "anyOf": [ { "items": { "type": "string" }, "type": "array" }, { "type": "null" } ], "default": null, "description": "the 'to' email address(es) used in email alerts", "title": "Email" }, "email_on_failure": { "anyOf": [ { "type": "boolean" }, { "type": "null" } ], "default": null, "description": "Indicates whether email alerts should be sent when a task failed", "title": "Email On Failure" }, "email_on_retry": { "anyOf": [ { "type": "boolean" }, { "type": "null" } ], "default": null, "description": "Indicates whether email alerts should be sent when a task is retried", "title": "Email On Retry" }, "retries": { "anyOf": [ { "type": "integer" }, { "type": "null" } ], "default": null, "description": "the number of retries that should be performed before failing the task", "title": "Retries" }, "retry_delay": { "anyOf": [ { "format": "duration", "type": "string" }, { "type": "null" } ], "default": null, "description": "delay between retries", "title": "Retry Delay" }, "retry_exponential_backoff": { "anyOf": [ { "type": "boolean" }, { "type": "null" } ], "default": null, "description": "allow progressively longer waits between retries by using exponential backoff algorithm on retry delay (delay will be converted into seconds)", "title": "Retry Exponential Backoff" }, "max_retry_delay": { "anyOf": [ { "format": "duration", "type": "string" }, { "type": "null" } ], "default": null, "description": "maximum delay interval between retries", "title": "Max Retry Delay" }, "start_date": { "anyOf": [ { "format": "date-time", "type": "string" }, { "maxItems": 2, "minItems": 2, "prefixItems": [ { "format": "date-time", "type": "string" }, { "type": "string" } ], "type": 
"array" }, { "type": "null" } ], "default": null, "description": "The start_date for the task, determines the execution_date for the first task instance. The best practice is to have the start_date rounded to your DAG\u2019s schedule_interval. Daily jobs have their start_date some day at 00:00:00, hourly jobs have their start_date at 00:00 of a specific hour. Note that Airflow simply looks at the latest execution_date and adds the schedule_interval to determine the next execution_date. It is also very important to note that different tasks\u2019 dependencies need to line up in time. If task A depends on task B and their start_date are offset in a way that their execution_date don\u2019t line up, A\u2019s dependencies will never be met. If you are looking to delay a task, for example running a daily task at 2AM, look into the TimeSensor and TimeDeltaSensor. We advise against using dynamic start_date and recommend using fixed ones. Read the FAQ entry about start_date for more information.", "title": "Start Date" }, "end_date": { "anyOf": [ { "format": "date-time", "type": "string" }, { "maxItems": 2, "minItems": 2, "prefixItems": [ { "format": "date-time", "type": "string" }, { "type": "string" } ], "type": "array" }, { "type": "null" } ], "default": null, "description": "if specified, the scheduler won\u2019t go beyond this date", "title": "End Date" }, "depends_on_past": { "anyOf": [ { "type": "boolean" }, { "type": "null" } ], "default": null, "description": "when set to true, task instances will run sequentially and only if the previous instance has succeeded or has been skipped. The task instance for the start_date is allowed to run.", "title": "Depends On Past" }, "queue": { "anyOf": [ { "type": "string" }, { "type": "null" } ], "default": null, "description": "which queue to target when running this job. 
Not all executors implement queue management, the CeleryExecutor does support targeting specific queues.", "title": "Queue" }, "pool": { "anyOf": [ { "type": "string" }, { "type": "null" } ], "default": null, "description": "the slot pool this task should run in, slot pools are a way to limit concurrency for certain tasks", "title": "Pool" }, "pool_slots": { "anyOf": [ { "type": "integer" }, { "type": "null" } ], "default": null, "description": "the number of pool slots this task should use (>= 1) Values less than 1 are not allowed", "title": "Pool Slots" }, "execution_timeout": { "anyOf": [ { "format": "duration", "type": "string" }, { "type": "null" } ], "default": null, "description": "max time allowed for the execution of this task instance, if it goes beyond it will raise and fail.", "title": "Execution Timeout" }, "trigger_rule": { "anyOf": [ { "$ref": "#/$defs/TriggerRule" }, { "type": "null" } ], "default": null, "description": "defines the rule by which dependencies are applied for the task to get triggered." }, "max_active_tis_per_dag": { "anyOf": [ { "type": "integer" }, { "type": "null" } ], "default": null, "description": "When set, a task will be able to limit the concurrent runs across execution_dates.", "title": "Max Active Tis Per Dag" }, "max_active_tis_per_dagrun": { "anyOf": [ { "type": "integer" }, { "type": "null" } ], "default": null, "description": "When set, a task will be able to limit the concurrent task instances per DAG run.", "title": "Max Active Tis Per Dagrun" }, "do_xcom_push": { "anyOf": [ { "type": "boolean" }, { "type": "null" } ], "default": null, "description": "if True, an XCom is pushed containing the Operator\u2019s result", "title": "Do Xcom Push" }, "multiple_outputs": { "anyOf": [ { "type": "boolean" }, { "type": "null" } ], "default": null, "description": "if True and do_xcom_push is True, pushes multiple XComs, one for each key in the returned dictionary result. 
If False and do_xcom_push is True, pushes a single XCom.", "title": "Multiple Outputs" }, "doc": { "anyOf": [ { "type": "string" }, { "type": "null" } ], "default": null, "description": "Add documentation or notes to your Task objects that is visible in Task Instance details View in the Webserver. This is a generic field that can be used for any format, but it is recommended to use specific fields for structured formats like Markdown, RST, JSON, or YAML.", "title": "Doc" }, "doc_md": { "anyOf": [ { "type": "string" }, { "type": "null" } ], "default": null, "description": "Add documentation in Markdown format or notes to your Task objects that is visible in Task Instance details View in the Webserver.", "title": "Doc Md" }, "doc_rst": { "anyOf": [ { "type": "string" }, { "type": "null" } ], "default": null, "description": "Add documentation in RST format or notes to your Task objects that is visible in Task Instance details View in the Webserver.", "title": "Doc Rst" }, "doc_json": { "anyOf": [ { "type": "string" }, { "type": "null" } ], "default": null, "description": "Add documentation in JSON format or notes to your Task objects that is visible in Task Instance details View in the Webserver.", "title": "Doc Json" }, "doc_yaml": { "anyOf": [ { "type": "string" }, { "type": "null" } ], "default": null, "description": "Add documentation in YAML format or notes to your Task objects that is visible in Task Instance details View in the Webserver.", "title": "Doc Yaml" }, "task_display_name": { "anyOf": [ { "type": "string" }, { "type": "null" } ], "default": null, "description": "The display name of the task which appears on the UI.", "title": "Task Display Name" } }, "$defs": { "TriggerRule": { "description": "Class with task's trigger rules.", "enum": [ "all_success", "all_failed", "all_done", "all_done_setup_success", "one_success", "one_failed", "one_done", "none_failed", "none_skipped", "always", "none_failed_min_one_success", "all_skipped" ], "title": 
"TriggerRule", "type": "string" } } }
- Fields:
depends_on_past (bool | None)
do_xcom_push (bool | None)
doc (str | None)
doc_json (str | None)
doc_md (str | None)
doc_rst (str | None)
doc_yaml (str | None)
email (List[str] | None)
email_on_failure (bool | None)
email_on_retry (bool | None)
end_date (datetime.datetime | Tuple[datetime.datetime, str] | None)
execution_timeout (datetime.timedelta | None)
max_active_tis_per_dag (int | None)
max_active_tis_per_dagrun (int | None)
max_retry_delay (datetime.timedelta | None)
multiple_outputs (bool | None)
owner (str | None)
pool (str | None)
pool_slots (int | None)
queue (str | None)
retries (int | None)
retry_delay (datetime.timedelta | None)
retry_exponential_backoff (bool | None)
start_date (datetime.datetime | Tuple[datetime.datetime, str] | None)
task_display_name (str | None)
trigger_rule (airflow.utils.trigger_rule.TriggerRule | None)
- field owner: str | None = None¶
the owner of the task. Using a meaningful description (e.g. user/person/team/role name) to clarify ownership is recommended.
- field email: List[str] | None = None¶
the ‘to’ email address(es) used in email alerts
- field email_on_failure: bool | None = None¶
Indicates whether email alerts should be sent when a task fails
- field email_on_retry: bool | None = None¶
Indicates whether email alerts should be sent when a task is retried
- field retries: int | None = None¶
the number of retries that should be performed before failing the task
- field retry_delay: timedelta | None = None¶
delay between retries
- field retry_exponential_backoff: bool | None = None¶
allow progressively longer waits between retries by using exponential backoff algorithm on retry delay (delay will be converted into seconds)
- field max_retry_delay: timedelta | None = None¶
maximum delay interval between retries
- field start_date: Annotated[datetime | Tuple[datetime, str], AfterValidator(func=_datetime_or_datetime_and_timezone)] | None = None¶
The start_date for the task, determines the execution_date for the first task instance. The best practice is to have the start_date rounded to your DAG’s schedule_interval. Daily jobs have their start_date some day at 00:00:00, hourly jobs have their start_date at 00:00 of a specific hour. Note that Airflow simply looks at the latest execution_date and adds the schedule_interval to determine the next execution_date. It is also very important to note that different tasks’ dependencies need to line up in time. If task A depends on task B and their start_dates are offset in a way that their execution_dates don’t line up, A’s dependencies will never be met. If you are looking to delay a task, for example running a daily task at 2AM, look into the TimeSensor and TimeDeltaSensor. We advise against using dynamic start_date and recommend using fixed ones. Read the FAQ entry about start_date for more information.
- field end_date: Annotated[datetime | Tuple[datetime, str], AfterValidator(func=_datetime_or_datetime_and_timezone)] | None = None¶
if specified, the scheduler won’t go beyond this date
- field depends_on_past: bool | None = None¶
when set to true, task instances will run sequentially and only if the previous instance has succeeded or has been skipped. The task instance for the start_date is allowed to run.
- field queue: str | None = None¶
which queue to target when running this job. Not all executors implement queue management; the CeleryExecutor does support targeting specific queues.
- field pool: str | None = None¶
the slot pool this task should run in, slot pools are a way to limit concurrency for certain tasks
- field pool_slots: int | None = None¶
the number of pool slots this task should use (>= 1). Values less than 1 are not allowed.
- field execution_timeout: timedelta | None = None¶
max time allowed for the execution of this task instance; if execution goes beyond it, the task will raise and fail.
- field trigger_rule: TriggerRule | None = None¶
defines the rule by which dependencies are applied for the task to get triggered.
- field max_active_tis_per_dag: int | None = None¶
When set, a task will be able to limit the concurrent runs across execution_dates.
- field max_active_tis_per_dagrun: int | None = None¶
When set, a task will be able to limit the concurrent task instances per DAG run.
- field do_xcom_push: bool | None = None¶
if True, an XCom is pushed containing the Operator’s result
- field multiple_outputs: bool | None = None¶
if True and do_xcom_push is True, pushes multiple XComs, one for each key in the returned dictionary result. If False and do_xcom_push is True, pushes a single XCom.
- field doc: str | None = None¶
Add documentation or notes to your Task objects that is visible in Task Instance details View in the Webserver. This is a generic field that can be used for any format, but it is recommended to use specific fields for structured formats like Markdown, RST, JSON, or YAML.
- field doc_md: str | None = None¶
Add documentation in Markdown format or notes to your Task objects that is visible in Task Instance details View in the Webserver.
- field doc_rst: str | None = None¶
Add documentation in RST format or notes to your Task objects that is visible in Task Instance details View in the Webserver.
- field doc_json: str | None = None¶
Add documentation in JSON format or notes to your Task objects that is visible in Task Instance details View in the Webserver.
- field doc_yaml: str | None = None¶
Add documentation in YAML format or notes to your Task objects that is visible in Task Instance details View in the Webserver.
- field task_display_name: str | None = None¶
The display name of the task which appears on the UI.