remove unused "type ignores" with pyright #7026

Merged: 2 commits, Aug 8, 2023
Changes from 1 commit
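Context for the diff below: pyright can report `# type: ignore` comments that no longer suppress any error, via its `reportUnnecessaryTypeIgnoreComment` diagnostic (off by default; it can be enabled in `pyrightconfig.json` or under `[tool.pyright]` in `pyproject.toml`). Each hunk in this PR deletes one such stale suppression; comments a checker still needs are left in place, as the unchanged context lines show. A minimal sketch of the pattern, using hypothetical code rather than anything from the pydantic codebase:

```python
# Minimal sketch, assuming pyright runs with
# reportUnnecessaryTypeIgnoreComment = "error": an ignore comment that
# suppresses no actual error is itself reported as a diagnostic.
from typing import Any


def get_ref(schema: dict[str, Any]) -> str | None:
    # dict.get() already type-checks against `str | None`, so this
    # ignore suppresses nothing; pyright flags the comment and the fix
    # is simply to delete it, which is what every hunk below does.
    return schema.get('ref')  # type: ignore
```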
2 changes: 1 addition & 1 deletion pydantic/__init__.py
@@ -21,7 +21,7 @@
from ._migration import getattr_migration
from .config import ConfigDict, Extra
from .deprecated.class_validators import root_validator, validator
- from .deprecated.config import BaseConfig # type: ignore
+ from .deprecated.config import BaseConfig
from .deprecated.tools import *
from .errors import *
from .fields import AliasChoices, AliasPath, Field, PrivateAttr, computed_field
6 changes: 3 additions & 3 deletions pydantic/_internal/_core_metadata.py
@@ -59,7 +59,7 @@ def metadata(self) -> CoreMetadata:
self._schema['metadata'] = metadata = CoreMetadata()
if not isinstance(metadata, dict):
raise TypeError(f'CoreSchema metadata should be a dict; got {metadata!r}.')
- return metadata # type: ignore[return-value]
+ return metadata


def build_metadata_dict(
@@ -80,9 +80,9 @@ def build_metadata_dict(
pydantic_js_annotation_functions=js_annotation_functions or [],
pydantic_js_prefer_positional_arguments=js_prefer_positional_arguments,
)
- metadata = {k: v for k, v in metadata.items() if v is not None} # type: ignore[assignment]
+ metadata = {k: v for k, v in metadata.items() if v is not None}

if initial_metadata is not None:
- metadata = {**initial_metadata, **metadata} # type: ignore[misc]
+ metadata = {**initial_metadata, **metadata}

return metadata
4 changes: 2 additions & 2 deletions pydantic/_internal/_core_utils.py
@@ -107,7 +107,7 @@ def get_ref(s: core_schema.CoreSchema) -> None | str:
"""Get the ref from the schema if it has one.
This exists just for type checking to work correctly.
"""
- return s.get('ref', None) # type: ignore
+ return s.get('ref', None)


def collect_definitions(schema: core_schema.CoreSchema) -> dict[str, core_schema.CoreSchema]:
@@ -134,7 +134,7 @@ def define_expected_missing_refs(
refs = set()

def _record_refs(s: core_schema.CoreSchema, recurse: Recurse) -> core_schema.CoreSchema:
- ref: str | None = s.get('ref') # type: ignore[assignment]
+ ref: str | None = s.get('ref')
if ref:
refs.add(ref)
return recurse(s, _record_refs)
6 changes: 3 additions & 3 deletions pydantic/_internal/_decorators.py
@@ -21,7 +21,7 @@
from ..functional_validators import FieldValidatorModes

try:
- from functools import cached_property # type: ignore
+ from functools import cached_property
except ImportError:
# python 3.7
cached_property = None
@@ -714,9 +714,9 @@ def unwrap_wrapped_function(
all.update({partial, partialmethod})

try:
- from functools import cached_property # type: ignore
+ from functools import cached_property
except ImportError:
- cached_property = type('', (), {}) # type: ignore
+ cached_property = type('', (), {})
else:
all.add(cached_property)

4 changes: 2 additions & 2 deletions pydantic/_internal/_generate_schema.py
@@ -253,7 +253,7 @@ def _add_custom_serialization_from_json_encoders(
)

# TODO: in theory we should check that the schema accepts a serialization key
- schema['serialization'] = core_schema.plain_serializer_function_ser_schema(encoder, when_used='json') # type: ignore
+ schema['serialization'] = core_schema.plain_serializer_function_ser_schema(encoder, when_used='json')
return schema

return schema
@@ -641,7 +641,7 @@ def _get_args_resolving_forward_refs(self, obj: Any, required: bool = False) ->
args = tuple([self._resolve_forward_ref(a) if isinstance(a, ForwardRef) else a for a in args])
elif required: # pragma: no cover
raise TypeError(f'Expected {obj} to have generic parameters but it had none')
- return args # type: ignore
+ return args

def _get_first_arg_or_any(self, obj: Any) -> Any:
args = self._get_args_resolving_forward_refs(obj)
8 changes: 4 additions & 4 deletions pydantic/_internal/_generics.py
@@ -36,7 +36,7 @@
_LIMITED_DICT_SIZE = 100
if TYPE_CHECKING:

- class LimitedDict(dict, MutableMapping[KT, VT]): # type: ignore[type-arg]
+ class LimitedDict(dict, MutableMapping[KT, VT]):
def __init__(self, size_limit: int = _LIMITED_DICT_SIZE):
...

@@ -74,7 +74,7 @@ def __class_getitem__(cls, *args: Any) -> Any:

if TYPE_CHECKING:

- class DeepChainMap(ChainMap[KT, VT]): # type: ignore
+ class DeepChainMap(ChainMap[KT, VT]):
...

else:
@@ -235,7 +235,7 @@ def get_standard_typevars_map(cls: type[Any]) -> dict[TypeVarType, Any] | None:
# In this case, we know that cls is a _GenericAlias, and origin is the generic type
# So it is safe to access cls.__args__ and origin.__parameters__
args: tuple[Any, ...] = cls.__args__ # type: ignore
- parameters: tuple[TypeVarType, ...] = origin.__parameters__ # type: ignore
+ parameters: tuple[TypeVarType, ...] = origin.__parameters__
return dict(zip(parameters, args))


@@ -323,7 +323,7 @@ def replace_types(type_: Any, type_map: Mapping[Any, Any] | None) -> Any:
resolved_type_args = tuple(replace_types(t, type_map) for t in parameters)
if all_identical(parameters, resolved_type_args):
return type_
- return type_[resolved_type_args] # type: ignore[index]
+ return type_[resolved_type_args]

# Handle special case for typehints that can have lists as arguments.
# `typing.Callable[[int, str], int]` is an example for this.
2 changes: 1 addition & 1 deletion pydantic/_internal/_mock_validator.py
@@ -77,7 +77,7 @@ def attempt_rebuild() -> SchemaValidator | None:
from ..dataclasses import rebuild_dataclass

if rebuild_dataclass(cls, raise_errors=False, _parent_namespace_depth=5):
- return cls.__pydantic_validator__ # type: ignore
+ return cls.__pydantic_validator__
else:
return None

2 changes: 1 addition & 1 deletion pydantic/_internal/_model_construction.py
@@ -50,7 +50,7 @@
object_setattr = object.__setattr__


- class _ModelNamespaceDict(dict): # type: ignore[type-arg]
+ class _ModelNamespaceDict(dict):
"""A dictionary subclass that intercepts attribute setting on model classes and
warns about overriding of decorators.
"""
4 changes: 2 additions & 2 deletions pydantic/_internal/_std_types_schema.py
@@ -76,7 +76,7 @@ def get_enum_core_schema(enum_type: type[Enum], config: ConfigDict) -> CoreSchem

def to_enum(__input_value: Any) -> Enum:
try:
- enum_field = enum_type(__input_value) # type: ignore
+ enum_field = enum_type(__input_value)
if use_enum_values:
return enum_field.value
return enum_field
@@ -398,7 +398,7 @@ def path_schema_prepare_pydantic_annotations(

def path_validator(input_value: str) -> os.PathLike[Any]:
try:
- return construct_path(input_value) # type: ignore
+ return construct_path(input_value)
except TypeError as e:
raise PydanticCustomError('path_type', 'Input is not a valid path') from e

2 changes: 1 addition & 1 deletion pydantic/_internal/_typing_extra.py
@@ -60,7 +60,7 @@ def origin_is_union(tp: type[Any] | None) -> bool:

LITERAL_TYPES: set[Any] = {Literal}
if hasattr(typing, 'Literal'):
- LITERAL_TYPES.add(typing.Literal) # type: ignore[attr-defined]
+ LITERAL_TYPES.add(typing.Literal)

NONE_TYPES: tuple[Any, ...] = (None, NoneType, *(tp[None] for tp in LITERAL_TYPES))

4 changes: 2 additions & 2 deletions pydantic/_internal/_utils.py
@@ -257,7 +257,7 @@ def merge(cls, base: Any, override: Any, intersect: bool = False) -> Any:
def _coerce_items(items: AbstractSetIntStr | MappingIntStrAny) -> MappingIntStrAny:
if isinstance(items, typing.Mapping):
pass
- elif isinstance(items, typing.AbstractSet): # type: ignore
+ elif isinstance(items, typing.AbstractSet):
items = dict.fromkeys(items, ...) # type: ignore
else:
class_name = getattr(items, '__class__', '???')
@@ -314,7 +314,7 @@ def smart_deepcopy(obj: Obj) -> Obj:
try:
if not obj and obj_type in BUILTIN_COLLECTIONS:
# faster way for empty collections, no need to copy its members
- return obj if obj_type is tuple else obj.copy() # type: ignore # tuple doesn't have copy method
+ return obj if obj_type is tuple else obj.copy() # tuple doesn't have copy method
except (TypeError, ValueError, RuntimeError):
# do we really dare to catch ALL errors? Seems a bit risky
pass
2 changes: 1 addition & 1 deletion pydantic/_internal/_validators.py
@@ -107,7 +107,7 @@ def _import_string_logic(dotted_path: str) -> Any:

def pattern_either_validator(__input_value: Any) -> typing.Pattern[Any]:
if isinstance(__input_value, typing.Pattern):
- return __input_value # type: ignore
+ return __input_value
elif isinstance(__input_value, (str, bytes)):
# todo strict mode
return compile_pattern(__input_value) # type: ignore
4 changes: 2 additions & 2 deletions pydantic/dataclasses.py
@@ -180,7 +180,7 @@ def create_dataclass(cls: type[Any]) -> type[PydanticDataclass]:
# If the class is generic, we need to make sure the subclass also inherits from Generic
# with all the same parameters.
bases = (cls,)
- if issubclass(cls, Generic): # type: ignore
+ if issubclass(cls, Generic):
generic_base = Generic[cls.__parameters__] # type: ignore
bases = bases + (generic_base,)
cls = types.new_class(cls.__name__, bases)
@@ -226,7 +226,7 @@ def _call_initvar(*args: Any, **kwargs: Any) -> NoReturn:
"""
raise TypeError("'InitVar' object is not callable")

- dataclasses.InitVar.__call__ = _call_initvar # type: ignore
+ dataclasses.InitVar.__call__ = _call_initvar


def rebuild_dataclass(
2 changes: 1 addition & 1 deletion pydantic/deprecated/class_validators.py
@@ -118,7 +118,7 @@ def validator(
"E.g. usage should be `@validator('<field_name>', ...)`",
code='validator-no-fields',
)
- elif not all(isinstance(field, str) for field in fields): # type: ignore
+ elif not all(isinstance(field, str) for field in fields):
raise PydanticUserError(
"`@validator` fields should be passed as separate string args. "
"E.g. usage should be `@validator('<field_name_1>', '<field_name_2>', ...)`",
2 changes: 1 addition & 1 deletion pydantic/deprecated/copy_internals.py
@@ -81,7 +81,7 @@ def _iter(
dict_key = field_key

if to_dict or value_include or value_exclude:
- v = _get_value( # type: ignore[no-untyped-call]
+ v = _get_value(
type(self),
v,
to_dict=to_dict,
2 changes: 1 addition & 1 deletion pydantic/deprecated/decorator.py
@@ -120,7 +120,7 @@ def __init__(self, function: 'AnyCallable', config: 'ConfigType'):
else:
assert p.kind == Parameter.VAR_KEYWORD, p.kind
self.v_kwargs_name = name
- fields[name] = Dict[str, annotation], None # type: ignore
+ fields[name] = Dict[str, annotation], None
takes_kwargs = True

# these checks avoid a clash between "args" and a field with that name
2 changes: 1 addition & 1 deletion pydantic/errors.py
@@ -105,7 +105,7 @@ def from_name_error(cls, name_error: NameError) -> Self:
Converted `PydanticUndefinedAnnotation` error.
"""
try:
- name = name_error.name # type: ignore # python > 3.10
+ name = name_error.name # python > 3.10
except AttributeError:
name = re.search(r".*'(.+?)'", str(name_error)).group(1) # type: ignore[union-attr]
return cls(name=name, message=str(name_error))
2 changes: 1 addition & 1 deletion pydantic/functional_validators.py
@@ -314,7 +314,7 @@ def field_validator(
code='validator-no-fields',
)
fields = __field, *fields
- if not all(isinstance(field, str) for field in fields): # type: ignore
+ if not all(isinstance(field, str) for field in fields):
raise PydanticUserError(
'`@field_validator` fields should be passed as separate string args. '
"E.g. usage should be `@validator('<field_name_1>', '<field_name_2>', ...)`",
10 changes: 5 additions & 5 deletions pydantic/json_schema.py
@@ -1182,7 +1182,7 @@ def _named_required_fields_schema(

json_schema = {'type': 'object', 'properties': properties}
if required_fields:
- json_schema['required'] = required_fields # type: ignore
+ json_schema['required'] = required_fields
return json_schema

def _get_alias_name(self, field: CoreSchemaField, name: str) -> str:
@@ -1932,7 +1932,7 @@ def update_with_validations(
"""
for core_key, json_schema_key in mapping.items():
if core_key in core_schema:
- json_schema[json_schema_key] = core_schema[core_key] # type: ignore[literal-required]
+ json_schema[json_schema_key] = core_schema[core_key]

class ValidationsMapping:
"""This class just contains mappings from core_schema attribute names to the corresponding
@@ -2161,15 +2161,15 @@ def _make_json_hashable(value: _Json) -> _HashableJson:


def _sort_json_schema(value: JsonSchemaValue, parent_key: str | None = None) -> JsonSchemaValue:
- if isinstance(value, dict): # type: ignore
+ if isinstance(value, dict):
sorted_dict: dict[str, JsonSchemaValue] = {}
keys = value.keys()
if parent_key != 'properties':
keys = sorted(keys)
for key in keys:
sorted_dict[key] = _sort_json_schema(value[key], parent_key=key)
- return sorted_dict # type: ignore
- elif isinstance(value, list): # type: ignore
+ return sorted_dict
+ elif isinstance(value, list):
sorted_list: list[JsonSchemaValue] = []
for item in value: # type: ignore
sorted_list.append(_sort_json_schema(item))
14 changes: 7 additions & 7 deletions pydantic/main.py
@@ -159,7 +159,7 @@ def __init__(__pydantic_self__, **data: Any) -> None: # type: ignore
__pydantic_self__.__pydantic_validator__.validate_python(data, self_instance=__pydantic_self__)

# The following line sets a flag that we use to determine when `__init__` gets overridden by the user
- __init__.__pydantic_base_init__ = True # type: ignore
+ __init__.__pydantic_base_init__ = True

@property
def model_computed_fields(self) -> dict[str, ComputedFieldInfo]:
@@ -396,7 +396,7 @@ def model_parametrized_name(cls, params: tuple[type[Any], ...]) -> str:
Raises:
TypeError: Raised when trying to generate concrete names for non-generic models.
"""
- if not issubclass(cls, typing.Generic): # type: ignore[arg-type]
+ if not issubclass(cls, typing.Generic):
raise TypeError('Concrete names should only be generated for generic models.')

# Any strings received should represent forward references, so we handle them specially below.
@@ -1109,7 +1109,7 @@ def copy(
)

values = dict(
- _deprecated_copy_internals._iter( # type: ignore
+ _deprecated_copy_internals._iter(
self, to_dict=False, by_alias=False, include=include, exclude=exclude, exclude_unset=False
),
**(update or {}),
@@ -1200,7 +1200,7 @@ def update_forward_refs(cls, **localns: Any) -> None: # noqa: D102
)
def _iter(self, *args: Any, **kwargs: Any) -> Any:
warnings.warn('The private method `_iter` will be removed and should no longer be used.', DeprecationWarning)
- return _deprecated_copy_internals._iter(self, *args, **kwargs) # type: ignore
+ return _deprecated_copy_internals._iter(self, *args, **kwargs)

@typing_extensions.deprecated(
'The private method `_copy_and_set_values` will be removed and should no longer be used.',
@@ -1211,7 +1211,7 @@ def _copy_and_set_values(self, *args: Any, **kwargs: Any) -> Any:
'The private method `_copy_and_set_values` will be removed and should no longer be used.',
DeprecationWarning,
)
- return _deprecated_copy_internals._copy_and_set_values(self, *args, **kwargs) # type: ignore
+ return _deprecated_copy_internals._copy_and_set_values(self, *args, **kwargs)

@classmethod
@typing_extensions.deprecated(
@@ -1222,7 +1222,7 @@ def _get_value(cls, *args: Any, **kwargs: Any) -> Any:
warnings.warn(
'The private method `_get_value` will be removed and should no longer be used.', DeprecationWarning
)
- return _deprecated_copy_internals._get_value(cls, *args, **kwargs) # type: ignore
+ return _deprecated_copy_internals._get_value(cls, *args, **kwargs)

@typing_extensions.deprecated(
'The private method `_calculate_keys` will be removed and should no longer be used.',
@@ -1232,7 +1232,7 @@ def _calculate_keys(self, *args: Any, **kwargs: Any) -> Any:
warnings.warn(
'The private method `_calculate_keys` will be removed and should no longer be used.', DeprecationWarning
)
- return _deprecated_copy_internals._calculate_keys(self, *args, **kwargs) # type: ignore
+ return _deprecated_copy_internals._calculate_keys(self, *args, **kwargs)


@typing.overload
6 changes: 3 additions & 3 deletions pydantic/networks.py
@@ -271,7 +271,7 @@ class IPvAnyAddress:

__slots__ = ()

- def __new__(cls, value: Any) -> IPv4Address | IPv6Address: # type: ignore[misc]
+ def __new__(cls, value: Any) -> IPv4Address | IPv6Address:
"""Validate an IPv4 or IPv6 address."""
try:
return IPv4Address(value)
@@ -310,7 +310,7 @@ class IPvAnyInterface:

__slots__ = ()

- def __new__(cls, value: NetworkType) -> IPv4Interface | IPv6Interface: # type: ignore[misc]
+ def __new__(cls, value: NetworkType) -> IPv4Interface | IPv6Interface:
"""Validate an IPv4 or IPv6 interface."""
try:
return IPv4Interface(value)
@@ -349,7 +349,7 @@ class IPvAnyNetwork:

__slots__ = ()

- def __new__(cls, value: NetworkType) -> IPv4Network | IPv6Network: # type: ignore[misc]
+ def __new__(cls, value: NetworkType) -> IPv4Network | IPv6Network:
"""Validate an IPv4 or IPv6 network."""
# Assume IP Network is defined with a default value for `strict` argument.
# Define your own class if you want to specify network address check strictness.
2 changes: 1 addition & 1 deletion pydantic/root_model.py
@@ -51,7 +51,7 @@ def __init__(__pydantic_self__, root: RootModelRootType = PydanticUndefined, **d
root = data # type: ignore
__pydantic_self__.__pydantic_validator__.validate_python(root, self_instance=__pydantic_self__)

- __init__.__pydantic_base_init__ = True # type: ignore
+ __init__.__pydantic_base_init__ = True

@classmethod
def model_construct(cls: type[Model], root: RootModelRootType, _fields_set: set[str] | None = None) -> Model:
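A closing observation on why these comments drift out of date: a bare `# type: ignore` is honored by both mypy and pyright, so a suppression added for one checker survives after the other stops emitting the error. Both tools accept narrower, checker-specific forms that make the intent auditable; a sketch (rule and code names are illustrative; check each tool's current diagnostics list):

```python
# mypy-style: suppress only the named error code on this line.
x: int = 'oops'  # type: ignore[assignment]

# pyright-style: a pyright-only suppression with an explicit rule name
# (assignment mismatches fell under reportGeneralTypeIssues at the time).
y: int = 'oops'  # pyright: ignore[reportGeneralTypeIssues]
```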