airflow_pydantic.Task¶
- pydantic model airflow_pydantic.Task[source]¶
Bases: TaskArgs, TaskRenderMixin, TaskInstantiateMixin
Show JSON schema
{ "title": "Task", "type": "object", "properties": { "owner": { "anyOf": [ { "type": "string" }, { "type": "null" } ], "default": null, "description": "the owner of the task. Using a meaningful description (e.g. user/person/team/role name) to clarify ownership is recommended.", "title": "Owner" }, "email": { "anyOf": [ { "items": { "type": "string" }, "type": "array" }, { "type": "null" } ], "default": null, "description": "the 'to' email address(es) used in email alerts", "title": "Email" }, "email_on_failure": { "anyOf": [ { "type": "boolean" }, { "type": "null" } ], "default": null, "description": "Indicates whether email alerts should be sent when a task failed", "title": "Email On Failure" }, "email_on_retry": { "anyOf": [ { "type": "boolean" }, { "type": "null" } ], "default": null, "description": "Indicates whether email alerts should be sent when a task is retried", "title": "Email On Retry" }, "retries": { "anyOf": [ { "type": "integer" }, { "type": "null" } ], "default": null, "description": "the number of retries that should be performed before failing the task", "title": "Retries" }, "retry_delay": { "anyOf": [ { "format": "duration", "type": "string" }, { "type": "null" } ], "default": null, "description": "delay between retries", "title": "Retry Delay" }, "retry_exponential_backoff": { "anyOf": [ { "type": "boolean" }, { "type": "null" } ], "default": null, "description": "allow progressively longer waits between retries by using exponential backoff algorithm on retry delay (delay will be converted into seconds)", "title": "Retry Exponential Backoff" }, "max_retry_delay": { "anyOf": [ { "format": "duration", "type": "string" }, { "type": "null" } ], "default": null, "description": "maximum delay interval between retries", "title": "Max Retry Delay" }, "start_date": { "anyOf": [ { "format": "date-time", "type": "string" }, { "maxItems": 2, "minItems": 2, "prefixItems": [ { "format": "date-time", "type": "string" }, { "type": "string" } ], "type": 
"array" }, { "type": "null" } ], "default": null, "description": "The start_date for the task, determines the execution_date for the first task instance. The best practice is to have the start_date rounded to your DAG\u2019s schedule_interval. Daily jobs have their start_date some day at 00:00:00, hourly jobs have their start_date at 00:00 of a specific hour. Note that Airflow simply looks at the latest execution_date and adds the schedule_interval to determine the next execution_date. It is also very important to note that different tasks\u2019 dependencies need to line up in time. If task A depends on task B and their start_date are offset in a way that their execution_date don\u2019t line up, A\u2019s dependencies will never be met. If you are looking to delay a task, for example running a daily task at 2AM, look into the TimeSensor and TimeDeltaSensor. We advise against using dynamic start_date and recommend using fixed ones. Read the FAQ entry about start_date for more information.", "title": "Start Date" }, "end_date": { "anyOf": [ { "format": "date-time", "type": "string" }, { "maxItems": 2, "minItems": 2, "prefixItems": [ { "format": "date-time", "type": "string" }, { "type": "string" } ], "type": "array" }, { "type": "null" } ], "default": null, "description": "if specified, the scheduler won\u2019t go beyond this date", "title": "End Date" }, "depends_on_past": { "anyOf": [ { "type": "boolean" }, { "type": "null" } ], "default": null, "description": "when set to true, task instances will run sequentially and only if the previous instance has succeeded or has been skipped. The task instance for the start_date is allowed to run.", "title": "Depends On Past" }, "queue": { "anyOf": [ { "type": "string" }, { "type": "null" } ], "default": null, "description": "which queue to target when running this job. 
Not all executors implement queue management, the CeleryExecutor does support targeting specific queues.", "title": "Queue" }, "pool": { "anyOf": [ { "type": "string" }, { "type": "null" } ], "default": null, "description": "the slot pool this task should run in, slot pools are a way to limit concurrency for certain tasks", "title": "Pool" }, "pool_slots": { "anyOf": [ { "type": "integer" }, { "type": "null" } ], "default": null, "description": "the number of pool slots this task should use (>= 1) Values less than 1 are not allowed", "title": "Pool Slots" }, "execution_timeout": { "anyOf": [ { "format": "duration", "type": "string" }, { "type": "null" } ], "default": null, "description": "max time allowed for the execution of this task instance, if it goes beyond it will raise and fail.", "title": "Execution Timeout" }, "trigger_rule": { "anyOf": [ { "$ref": "#/$defs/TriggerRule" }, { "type": "null" } ], "default": null, "description": "defines the rule by which dependencies are applied for the task to get triggered." }, "max_active_tis_per_dag": { "anyOf": [ { "type": "integer" }, { "type": "null" } ], "default": null, "description": "When set, a task will be able to limit the concurrent runs across execution_dates.", "title": "Max Active Tis Per Dag" }, "max_active_tis_per_dagrun": { "anyOf": [ { "type": "integer" }, { "type": "null" } ], "default": null, "description": "When set, a task will be able to limit the concurrent task instances per DAG run.", "title": "Max Active Tis Per Dagrun" }, "do_xcom_push": { "anyOf": [ { "type": "boolean" }, { "type": "null" } ], "default": null, "description": "if True, an XCom is pushed containing the Operator\u2019s result", "title": "Do Xcom Push" }, "multiple_outputs": { "anyOf": [ { "type": "boolean" }, { "type": "null" } ], "default": null, "description": "if True and do_xcom_push is True, pushes multiple XComs, one for each key in the returned dictionary result. 
If False and do_xcom_push is True, pushes a single XCom.", "title": "Multiple Outputs" }, "doc": { "anyOf": [ { "type": "string" }, { "type": "null" } ], "default": null, "description": "Add documentation or notes to your Task objects that is visible in Task Instance details View in the Webserver. This is a generic field that can be used for any format, but it is recommended to use specific fields for structured formats like Markdown, RST, JSON, or YAML.", "title": "Doc" }, "doc_md": { "anyOf": [ { "type": "string" }, { "type": "null" } ], "default": null, "description": "Add documentation in Markdown format or notes to your Task objects that is visible in Task Instance details View in the Webserver.", "title": "Doc Md" }, "doc_rst": { "anyOf": [ { "type": "string" }, { "type": "null" } ], "default": null, "description": "Add documentation in RST format or notes to your Task objects that is visible in Task Instance details View in the Webserver.", "title": "Doc Rst" }, "doc_json": { "anyOf": [ { "type": "string" }, { "type": "null" } ], "default": null, "description": "Add documentation in JSON format or notes to your Task objects that is visible in Task Instance details View in the Webserver.", "title": "Doc Json" }, "doc_yaml": { "anyOf": [ { "type": "string" }, { "type": "null" } ], "default": null, "description": "Add documentation in YAML format or notes to your Task objects that is visible in Task Instance details View in the Webserver.", "title": "Doc Yaml" }, "task_display_name": { "anyOf": [ { "type": "string" }, { "type": "null" } ], "default": null, "description": "The display name of the task which appears on the UI.", "title": "Task Display Name" }, "task_id": { "anyOf": [ { "type": "string" }, { "type": "null" } ], "default": null, "description": "a unique, meaningful id for the task", "title": "Task Id" }, "operator": { "default": null, "title": "Operator" }, "dependencies": { "default": null, "title": "Dependencies" } }, "$defs": { "TriggerRule": { 
"description": "Class with task's trigger rules.", "enum": [ "all_success", "all_failed", "all_done", "all_done_setup_success", "one_success", "one_failed", "one_done", "none_failed", "none_skipped", "always", "none_failed_min_one_success", "all_skipped" ], "title": "TriggerRule", "type": "string" } } }
- Fields:
dependencies (List[str | airflow_pydantic.task.Task] | List[str | Tuple[str, str] | Tuple[airflow_pydantic.task.Task, str]] | None)
operator (type)
task_id (str | None)
- field task_id: str | None = None¶
a unique, meaningful id for the task
- field operator: Annotated[type, BeforeValidator(func=get_import_path, json_schema_input_type=PydanticUndefined), PlainSerializer(func=serialize_path_as_string, return_type=str, when_used=json)] [Required]¶
airflow operator path