Skip to content

Commit 8401000

Browse files
authored
CI: Upgrade important CI environment (#64744)
* CI: Upgrade important CI environment * style: fix provider mypy error * fixup! style: fix provider mypy error * fixup! fixup! style: fix provider mypy error * fixup! fixup! fixup! style: fix provider mypy error * fixup! fixup! fixup! style: fix provider mypy error * fixup! fixup! fixup! style: fix provider mypy error
1 parent 8ad491d commit 8401000

File tree

13 files changed

+1076
-1037
lines changed

13 files changed

+1076
-1037
lines changed

airflow-core/src/airflow/assets/manager.py

Lines changed: 2 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -575,6 +575,8 @@ def resolve_asset_manager() -> AssetManager:
575575
key="asset_manager_kwargs",
576576
fallback={},
577577
)
578+
if TYPE_CHECKING:
579+
assert isinstance(_asset_manager_kwargs, dict)
578580
return _asset_manager_class(**_asset_manager_kwargs)
579581

580582

airflow-core/src/airflow/config_templates/airflow_local_settings.py

Lines changed: 26 additions & 19 deletions
Original file line number | Diff line number | Diff line change
@@ -20,7 +20,7 @@
2020
from __future__ import annotations
2121

2222
import os
23-
from typing import TYPE_CHECKING, Any
23+
from typing import TYPE_CHECKING, Any, cast
2424
from urllib.parse import urlsplit
2525

2626
from airflow.configuration import conf
@@ -159,59 +159,63 @@ def _default_conn_name_from(mod_path, hook_name):
159159
"logging/remote_task_handler_kwargs must be a JSON object (a python dict), we got "
160160
f"{type(remote_task_handler_kwargs)}"
161161
)
162+
_handler_kwargs = cast("dict[str, Any]", remote_task_handler_kwargs)
162163
delete_local_copy = conf.getboolean("logging", "delete_local_logs")
163164

164165
if remote_base_log_folder.startswith("s3://"):
165166
from airflow.providers.amazon.aws.log.s3_task_handler import S3RemoteLogIO
166167

167168
_default_conn_name_from("airflow.providers.amazon.aws.hooks.s3", "S3Hook")
168169
REMOTE_TASK_LOG = S3RemoteLogIO(
169-
**(
170+
**cast(
171+
"dict[str, Any]",
170172
{
171173
"base_log_folder": BASE_LOG_FOLDER,
172174
"remote_base": remote_base_log_folder,
173175
"delete_local_copy": delete_local_copy,
174176
}
175-
| remote_task_handler_kwargs
177+
| _handler_kwargs,
176178
)
177179
)
178-
remote_task_handler_kwargs = {}
180+
_handler_kwargs = {}
179181

180182
elif remote_base_log_folder.startswith("cloudwatch://"):
181183
from airflow.providers.amazon.aws.log.cloudwatch_task_handler import CloudWatchRemoteLogIO
182184

183185
_default_conn_name_from("airflow.providers.amazon.aws.hooks.logs", "AwsLogsHook")
184186
url_parts = urlsplit(remote_base_log_folder)
185187
REMOTE_TASK_LOG = CloudWatchRemoteLogIO(
186-
**(
188+
**cast(
189+
"dict[str, Any]",
187190
{
188191
"base_log_folder": BASE_LOG_FOLDER,
189192
"remote_base": remote_base_log_folder,
190193
"delete_local_copy": delete_local_copy,
191194
"log_group_arn": url_parts.netloc + url_parts.path,
192195
}
193-
| remote_task_handler_kwargs
196+
| _handler_kwargs,
194197
)
195198
)
196-
remote_task_handler_kwargs = {}
199+
_handler_kwargs = {}
197200
elif remote_base_log_folder.startswith("gs://"):
198201
from airflow.providers.google.cloud.log.gcs_task_handler import GCSRemoteLogIO
199202

200203
_default_conn_name_from("airflow.providers.google.cloud.hooks.gcs", "GCSHook")
201204
key_path = conf.get_mandatory_value("logging", "google_key_path", fallback=None)
202205

203206
REMOTE_TASK_LOG = GCSRemoteLogIO(
204-
**(
207+
**cast(
208+
"dict[str, Any]",
205209
{
206210
"base_log_folder": BASE_LOG_FOLDER,
207211
"remote_base": remote_base_log_folder,
208212
"delete_local_copy": delete_local_copy,
209213
"gcp_key_path": key_path,
210214
}
211-
| remote_task_handler_kwargs
215+
| _handler_kwargs,
212216
)
213217
)
214-
remote_task_handler_kwargs = {}
218+
_handler_kwargs = {}
215219
elif remote_base_log_folder.startswith("wasb"):
216220
from airflow.providers.microsoft.azure.log.wasb_task_handler import WasbRemoteLogIO
217221

@@ -224,17 +228,18 @@ def _default_conn_name_from(mod_path, hook_name):
224228
wasb_remote_base = remote_base_log_folder.removeprefix("wasb://")
225229

226230
REMOTE_TASK_LOG = WasbRemoteLogIO(
227-
**(
231+
**cast(
232+
"dict[str, Any]",
228233
{
229234
"base_log_folder": BASE_LOG_FOLDER,
230235
"remote_base": wasb_remote_base,
231236
"delete_local_copy": delete_local_copy,
232237
"wasb_container": wasb_log_container,
233238
}
234-
| remote_task_handler_kwargs
239+
| _handler_kwargs,
235240
)
236241
)
237-
remote_task_handler_kwargs = {}
242+
_handler_kwargs = {}
238243
elif remote_base_log_folder.startswith("stackdriver://"):
239244
key_path = conf.get_mandatory_value("logging", "GOOGLE_KEY_PATH", fallback=None)
240245
# stackdriver:///airflow-tasks => airflow-tasks
@@ -255,32 +260,34 @@ def _default_conn_name_from(mod_path, hook_name):
255260
_default_conn_name_from("airflow.providers.alibaba.cloud.hooks.oss", "OSSHook")
256261

257262
REMOTE_TASK_LOG = OSSRemoteLogIO(
258-
**(
263+
**cast(
264+
"dict[str, Any]",
259265
{
260266
"base_log_folder": BASE_LOG_FOLDER,
261267
"remote_base": remote_base_log_folder,
262268
"delete_local_copy": delete_local_copy,
263269
}
264-
| remote_task_handler_kwargs
270+
| _handler_kwargs,
265271
)
266272
)
267-
remote_task_handler_kwargs = {}
273+
_handler_kwargs = {}
268274
elif remote_base_log_folder.startswith("hdfs://"):
269275
from airflow.providers.apache.hdfs.log.hdfs_task_handler import HdfsRemoteLogIO
270276

271277
_default_conn_name_from("airflow.providers.apache.hdfs.hooks.webhdfs", "WebHDFSHook")
272278

273279
REMOTE_TASK_LOG = HdfsRemoteLogIO(
274-
**(
280+
**cast(
281+
"dict[str, Any]",
275282
{
276283
"base_log_folder": BASE_LOG_FOLDER,
277284
"remote_base": urlsplit(remote_base_log_folder).path,
278285
"delete_local_copy": delete_local_copy,
279286
}
280-
| remote_task_handler_kwargs
287+
| _handler_kwargs,
281288
)
282289
)
283-
remote_task_handler_kwargs = {}
290+
_handler_kwargs = {}
284291
elif ELASTICSEARCH_HOST:
285292
from airflow.providers.elasticsearch.log.es_task_handler import ElasticsearchRemoteLogIO
286293

airflow-core/tests/unit/api_fastapi/core_api/routes/public/test_task_instances.py

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -21,7 +21,7 @@
2121
import itertools
2222
import os
2323
from datetime import timedelta
24-
from typing import TYPE_CHECKING
24+
from typing import TYPE_CHECKING, Any
2525
from unittest import mock
2626

2727
import pendulum
@@ -151,7 +151,7 @@ def create_task_instances(
151151
assert dag_version
152152

153153
for mi in map_indexes:
154-
kwargs = self.ti_init | {"map_index": mi}
154+
kwargs: dict[str, Any] = self.ti_init | {"map_index": mi}
155155
ti = TaskInstance(task=tasks[i], **kwargs, dag_version_id=dag_version.id)
156156
session.add(ti)
157157
ti.dag_run = dr

dev/breeze/uv.lock

Lines changed: 22 additions & 22 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

devel-common/pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -113,7 +113,7 @@ dependencies = [
113113
"mypy" = [
114114
# Mypy dependencies
115115
# TODO: upgrade to newer versions of MyPy continuously as they are released
116-
"mypy==1.19.1",
116+
"mypy==1.20.0",
117117
"types-Deprecated>=1.2.9.20240311",
118118
"types-Markdown>=3.6.0.20240316",
119119
"types-PyMySQL>=1.1.0.20240425",

providers/atlassian/jira/src/airflow/providers/atlassian/jira/sensors/jira.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -52,7 +52,7 @@ def __init__(
5252
if result_processor is not None:
5353
self.result_processor = result_processor
5454
self.method_name = method_name
55-
self.method_params = method_params
55+
self.method_params = method_params or {}
5656

5757
def poke(self, context: Context) -> Any:
5858
hook = JiraHook(jira_conn_id=self.jira_conn_id)

providers/edge3/src/airflow/providers/edge3/executors/edge_executor.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -55,7 +55,7 @@ class EdgeExecutor(BaseExecutor):
5555

5656
def __init__(self, *args, **kwargs):
5757
super().__init__(*args, **kwargs)
58-
self.last_reported_state: dict[TaskInstanceKey, TaskInstanceState] = {}
58+
self.last_reported_state: dict[TaskInstanceKey, TaskInstanceState | str] = {}
5959

6060
# Check if self has the ExecutorConf set on the self.conf attribute with all required methods.
6161
# In Airflow 2.x, ExecutorConf exists but lacks methods like getint, getboolean, getsection, etc.

providers/github/src/airflow/providers/github/sensors/github.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -54,7 +54,7 @@ def __init__(
5454
if result_processor is not None:
5555
self.result_processor = result_processor
5656
self.method_name = method_name
57-
self.method_params = method_params
57+
self.method_params = method_params or {}
5858

5959
def poke(self, context: Context) -> bool:
6060
hook = GithubHook(github_conn_id=self.github_conn_id)

providers/google/src/airflow/providers/google/cloud/triggers/cloud_composer.py

Lines changed: 6 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -22,7 +22,7 @@
2222
import json
2323
from collections.abc import Collection, Iterable, Sequence
2424
from datetime import datetime
25-
from typing import Any
25+
from typing import TYPE_CHECKING, Any
2626

2727
from dateutil import parser
2828
from google.api_core.exceptions import NotFound
@@ -32,6 +32,9 @@
3232
from airflow.providers.google.cloud.hooks.cloud_composer import CloudComposerAsyncHook
3333
from airflow.triggers.base import BaseTrigger, TriggerEvent
3434

35+
if TYPE_CHECKING:
36+
from airflow.utils.state import TerminalTIState
37+
3538

3639
class CloudComposerExecutionTrigger(BaseTrigger):
3740
"""The trigger handles the async communication with the Google Cloud Composer."""
@@ -183,7 +186,7 @@ def __init__(
183186
composer_dag_id: str,
184187
start_date: datetime,
185188
end_date: datetime,
186-
allowed_states: list[str],
189+
allowed_states: list[str] | list[TerminalTIState],
187190
composer_dag_run_id: str | None = None,
188191
gcp_conn_id: str = "google_cloud_default",
189192
impersonation_chain: str | Sequence[str] | None = None,
@@ -358,7 +361,7 @@ def __init__(
358361
environment_id: str,
359362
start_date: datetime,
360363
end_date: datetime,
361-
allowed_states: list[str],
364+
allowed_states: list[str] | list[TerminalTIState],
362365
skipped_states: list[str],
363366
failed_states: list[str],
364367
composer_external_dag_id: str,

0 commit comments

Comments (0)