Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Enable TCH004 and TCH005 rules #35475

Merged
merged 1 commit into from
Nov 7, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
3 changes: 1 addition & 2 deletions airflow/dag_processing/processor.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@
from airflow.configuration import conf
from airflow.exceptions import AirflowException, TaskNotFound
from airflow.models import SlaMiss, errors
from airflow.models.dag import DagModel
from airflow.models.dag import DAG, DagModel
from airflow.models.dagbag import DagBag
from airflow.models.dagrun import DagRun as DR
from airflow.models.dagwarning import DagWarning, DagWarningType
Expand All @@ -63,7 +63,6 @@
from sqlalchemy.orm.session import Session

from airflow.callbacks.callback_requests import CallbackRequest
from airflow.models.dag import DAG
from airflow.models.operator import Operator


Expand Down
2 changes: 2 additions & 0 deletions airflow/models/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -91,6 +91,7 @@ def __getattr__(name):
"DagPickle": "airflow.models.dagpickle",
"DagRun": "airflow.models.dagrun",
"DagTag": "airflow.models.dag",
"DagWarning": "airflow.models.dagwarning",
"DbCallbackRequest": "airflow.models.db_callback_request",
"ImportError": "airflow.models.errors",
"Log": "airflow.models.log",
Expand Down Expand Up @@ -120,6 +121,7 @@ def __getattr__(name):
from airflow.models.dagbag import DagBag
from airflow.models.dagpickle import DagPickle
from airflow.models.dagrun import DagRun
from airflow.models.dagwarning import DagWarning
from airflow.models.db_callback_request import DbCallbackRequest
from airflow.models.errors import ImportError
from airflow.models.log import Log
Expand Down
9 changes: 5 additions & 4 deletions airflow/serialization/pydantic/job.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,16 +16,13 @@
# under the License.
import datetime
from functools import cached_property
from typing import TYPE_CHECKING, Optional
from typing import Optional

from pydantic import BaseModel as BaseModelPydantic

from airflow.executors.executor_loader import ExecutorLoader
from airflow.jobs.base_job_runner import BaseJobRunner

if TYPE_CHECKING:
from airflow.jobs.job import Job


def check_runner_initialized(job_runner: Optional[BaseJobRunner], job_type: str) -> BaseJobRunner:
if job_runner is None:
Expand Down Expand Up @@ -59,11 +56,15 @@ def executor(self):

@cached_property
def heartrate(self) -> float:
from airflow.jobs.job import Job

assert self.job_type is not None
return Job._heartrate(self.job_type)

def is_alive(self, grace_multiplier=2.1) -> bool:
"""Is this job currently alive."""
from airflow.jobs.job import Job

return Job._is_alive(
job_type=self.job_type,
heartrate=self.heartrate,
Expand Down
4 changes: 2 additions & 2 deletions airflow/serialization/serialized_objects.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,9 +80,9 @@

HAS_KUBERNETES: bool
try:
from kubernetes.client import models as k8s
from kubernetes.client import models as k8s # noqa: TCH004

from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator
from airflow.providers.cncf.kubernetes.pod_generator import PodGenerator # noqa: TCH004
Comment on lines -83 to +85
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The real dark magic happens here

elif var.__class__.__name__ == "V1Pod" and _has_kubernetes() and isinstance(var, k8s.V1Pod):
json_pod = PodGenerator.serialize_pod(var)
return cls._encode(json_pod, type_=DAT.POD)

That only affects k8s and PodGenerator, so it is better to exclude only these lines from the check rather than the entire module

except ImportError:
pass

Expand Down
6 changes: 3 additions & 3 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -59,8 +59,7 @@ extend-select = [
"D403",
"D412",
"D419",
"TCH001", # typing-only-first-party-import
"TCH002", # typing-only-third-party-import
"TCH", # Rules around TYPE_CHECKING blocks
"TID251", # Specific modules or module members that may not be imported or accessed
"TID253", # Ban certain modules from being imported at module level
]
Expand All @@ -71,6 +70,7 @@ extend-ignore = [
"D214",
"D215",
"E731",
"TCH003", # Do not move imports from stdlib to TYPE_CHECKING block
]

namespace-packages = ["airflow/providers"]
Expand Down Expand Up @@ -113,7 +113,7 @@ required-imports = ["from __future__ import annotations"]
combine-as-imports = true

[tool.ruff.per-file-ignores]
"airflow/models/__init__.py" = ["F401"]
"airflow/models/__init__.py" = ["F401", "TCH004"]
"airflow/models/sqla_models.py" = ["F401"]

# The test_python.py is needed because adding __future__.annotations breaks runtime checks that are
Expand Down