Mark more fields on API as dump-only (#38616)
(cherry picked from commit 35d1899)
jscheffl authored and ephraimbuddy committed Mar 31, 2024
1 parent 5e77935 commit 3c214a5
Showing 6 changed files with 58 additions and 60 deletions.
20 changes: 10 additions & 10 deletions airflow/api_connexion/schemas/dag_schema.py
@@ -96,21 +96,21 @@ class DAGDetailSchema(DAGSchema):
     """DAG details."""
 
     owners = fields.Method("get_owners", dump_only=True)
-    timezone = TimezoneField()
-    catchup = fields.Boolean()
-    orientation = fields.String()
-    concurrency = fields.Method("get_concurrency")  # TODO: Remove in Airflow 3.0
-    max_active_tasks = fields.Integer()
+    timezone = TimezoneField(dump_only=True)
+    catchup = fields.Boolean(dump_only=True)
+    orientation = fields.String(dump_only=True)
+    concurrency = fields.Method("get_concurrency", dump_only=True)  # TODO: Remove in Airflow 3.0
+    max_active_tasks = fields.Integer(dump_only=True)
     dataset_expression = fields.Dict(allow_none=True)
-    start_date = fields.DateTime()
-    dag_run_timeout = fields.Nested(TimeDeltaSchema, attribute="dagrun_timeout")
-    doc_md = fields.String()
-    default_view = fields.String()
+    start_date = fields.DateTime(dump_only=True)
+    dag_run_timeout = fields.Nested(TimeDeltaSchema, attribute="dagrun_timeout", dump_only=True)
+    doc_md = fields.String(dump_only=True)
+    default_view = fields.String(dump_only=True)
     params = fields.Method("get_params", dump_only=True)
     tags = fields.Method("get_tags", dump_only=True)  # type: ignore
     is_paused = fields.Method("get_is_paused", dump_only=True)
     is_active = fields.Method("get_is_active", dump_only=True)
-    is_paused_upon_creation = fields.Boolean()
+    is_paused_upon_creation = fields.Boolean(dump_only=True)
     end_date = fields.DateTime(dump_only=True)
     template_searchpath = fields.String(dump_only=True)
     render_template_as_native_obj = fields.Boolean(dump_only=True)
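For context, dump_only marks a marshmallow field as read-only: it is still serialized by dump() but is rejected as input to load(). A minimal standalone sketch of that behavior (ExampleSchema and its fields are illustrative only, not one of the Airflow schemas in this diff):

from marshmallow import Schema, ValidationError, fields


class ExampleSchema(Schema):
    # Illustrative fields, not taken from the commit.
    name = fields.String()
    timezone = fields.String(dump_only=True)


schema = ExampleSchema()

# dump() still serializes the read-only field.
print(schema.dump({"name": "my_dag", "timezone": "UTC"}))
# {'name': 'my_dag', 'timezone': 'UTC'}

# load() treats the dump-only field as unknown input and, with marshmallow's
# default unknown=RAISE policy, rejects it.
try:
    schema.load({"name": "my_dag", "timezone": "UTC"})
except ValidationError as err:
    print(err.messages)  # {'timezone': ['Unknown field.']}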
6 changes: 3 additions & 3 deletions airflow/api_connexion/schemas/dag_warning_schema.py
@@ -33,9 +33,9 @@ class Meta:
         model = DagWarning
 
     dag_id = auto_field(data_key="dag_id", dump_only=True)
-    warning_type = auto_field()
-    message = auto_field()
-    timestamp = auto_field(format="iso")
+    warning_type = auto_field(dump_only=True)
+    message = auto_field(dump_only=True)
+    timestamp = auto_field(format="iso", dump_only=True)
 
 
 class DagWarningCollection(NamedTuple):
8 changes: 3 additions & 5 deletions airflow/api_connexion/schemas/error_schema.py
@@ -33,11 +33,9 @@ class Meta:
         model = ImportError
 
     import_error_id = auto_field("id", dump_only=True)
-    timestamp = auto_field(format="iso")
-    filename = auto_field()
-    stack_trace = auto_field(
-        "stacktrace",
-    )
+    timestamp = auto_field(format="iso", dump_only=True)
+    filename = auto_field(dump_only=True)
+    stack_trace = auto_field("stacktrace", dump_only=True)
 
 
 class ImportErrorCollection(NamedTuple):
4 changes: 2 additions & 2 deletions airflow/api_connexion/schemas/log_schema.py
@@ -24,8 +24,8 @@
 class LogsSchema(Schema):
     """Schema for logs."""
 
-    content = fields.Str()
-    continuation_token = fields.Str()
+    content = fields.Str(dump_only=True)
+    continuation_token = fields.Str(dump_only=True)
 
 
 class LogResponseObject(NamedTuple):
26 changes: 13 additions & 13 deletions airflow/api_connexion/schemas/plugin_schema.py
@@ -24,19 +24,19 @@
 class PluginSchema(Schema):
     """Plugin schema."""
 
-    name = fields.String()
-    hooks = fields.List(fields.String())
-    executors = fields.List(fields.String())
-    macros = fields.List(fields.String())
-    flask_blueprints = fields.List(fields.String())
-    appbuilder_views = fields.List(fields.Dict())
-    appbuilder_menu_items = fields.List(fields.Dict())
-    global_operator_extra_links = fields.List(fields.String())
-    operator_extra_links = fields.List(fields.String())
-    source = fields.String()
-    ti_deps = fields.List(fields.String())
-    listeners = fields.List(fields.String())
-    timetables = fields.List(fields.String())
+    name = fields.String(dump_only=True)
+    hooks = fields.List(fields.String(dump_only=True))
+    executors = fields.List(fields.String(dump_only=True))
+    macros = fields.List(fields.String(dump_only=True))
+    flask_blueprints = fields.List(fields.String(dump_only=True))
+    appbuilder_views = fields.List(fields.Dict(dump_only=True))
+    appbuilder_menu_items = fields.List(fields.Dict(dump_only=True))
+    global_operator_extra_links = fields.List(fields.String(dump_only=True))
+    operator_extra_links = fields.List(fields.String(dump_only=True))
+    source = fields.String(dump_only=True)
+    ti_deps = fields.List(fields.String(dump_only=True))
+    listeners = fields.List(fields.String(dump_only=True))
+    timetables = fields.List(fields.String(dump_only=True))
 
 
 class PluginCollection(NamedTuple):
54 changes: 27 additions & 27 deletions airflow/api_connexion/schemas/task_instance_schema.py
@@ -44,34 +44,34 @@ class Meta:
 
         model = TaskInstance
 
-    task_id = auto_field()
-    dag_id = auto_field()
-    run_id = auto_field(data_key="dag_run_id")
-    map_index = auto_field()
-    execution_date = auto_field()
-    start_date = auto_field()
-    end_date = auto_field()
-    duration = auto_field()
-    state = TaskInstanceStateField()
-    _try_number = auto_field(data_key="try_number")
-    max_tries = auto_field()
+    task_id = auto_field(dump_only=True)
+    dag_id = auto_field(dump_only=True)
+    run_id = auto_field(data_key="dag_run_id", dump_only=True)
+    map_index = auto_field(dump_only=True)
+    execution_date = auto_field(dump_only=True)
+    start_date = auto_field(dump_only=True)
+    end_date = auto_field(dump_only=True)
+    duration = auto_field(dump_only=True)
+    state = TaskInstanceStateField(dump_only=True)
+    _try_number = auto_field(data_key="try_number", dump_only=True)
+    max_tries = auto_field(dump_only=True)
     task_display_name = fields.String(attribute="task_display_name", dump_only=True)
-    hostname = auto_field()
-    unixname = auto_field()
-    pool = auto_field()
-    pool_slots = auto_field()
-    queue = auto_field()
-    priority_weight = auto_field()
-    operator = auto_field()
-    queued_dttm = auto_field(data_key="queued_when")
-    pid = auto_field()
-    executor_config = auto_field()
-    note = auto_field()
-    sla_miss = fields.Nested(SlaMissSchema, dump_default=None)
-    rendered_map_index = auto_field()
-    rendered_fields = JsonObjectField(dump_default={})
-    trigger = fields.Nested(TriggerSchema)
-    triggerer_job = fields.Nested(JobSchema)
+    hostname = auto_field(dump_only=True)
+    unixname = auto_field(dump_only=True)
+    pool = auto_field(dump_only=True)
+    pool_slots = auto_field(dump_only=True)
+    queue = auto_field(dump_only=True)
+    priority_weight = auto_field(dump_only=True)
+    operator = auto_field(dump_only=True)
+    queued_dttm = auto_field(data_key="queued_when", dump_only=True)
+    pid = auto_field(dump_only=True)
+    executor_config = auto_field(dump_only=True)
+    note = auto_field(dump_only=True)
+    sla_miss = fields.Nested(SlaMissSchema, dump_default=None, dump_only=True)
+    rendered_map_index = auto_field(dump_only=True)
+    rendered_fields = JsonObjectField(dump_default={}, dump_only=True)
+    trigger = fields.Nested(TriggerSchema, dump_only=True)
+    triggerer_job = fields.Nested(JobSchema, dump_only=True)
 
     def get_attribute(self, obj, attr, default):
         if attr == "sla_miss":
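The dag_warning, error, and task_instance schemas above use marshmallow-sqlalchemy's auto_field, which forwards keyword arguments such as dump_only to the marshmallow field it generates from the model column. A minimal sketch of that pattern, using a throwaway Widget model rather than an Airflow table:

from marshmallow_sqlalchemy import SQLAlchemySchema, auto_field
from sqlalchemy import Column, Integer, String
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class Widget(Base):
    # Throwaway demo model for illustration, not an Airflow model.
    __tablename__ = "widget"

    id = Column(Integer, primary_key=True)
    name = Column(String)


class WidgetSchema(SQLAlchemySchema):
    class Meta:
        model = Widget

    # Both columns are exposed on dump() but cannot be set through load().
    id = auto_field(dump_only=True)
    name = auto_field(dump_only=True)


schema = WidgetSchema()
print(schema.dump(Widget(id=1, name="demo")))  # {'id': 1, 'name': 'demo'}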
