Use variable annotations by hauntsaninja · Pull Request #10723 · python/mypy · GitHub

Use variable annotations #10723


Merged: 10 commits (merged Jun 28, 2021)
Changes from 1 commit
run darker to appease flake8
hauntsaninja committed Jun 26, 2021
commit 514c14269ed26454d32f725ddecc0f7c09f0c7dd
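
For context on the commit message above: darker applies Black-compatible formatting only to lines touched by the current change set, so a pass like this satisfies flake8 without reformatting the whole repository. A minimal sketch of how such a run might look locally (the exact flags and target paths here are assumptions, not taken from this PR):

    # reformat only lines changed relative to the master branch
    darker --revision master mypy/
    # or preview the result as a diff without modifying files
    darker --diff --revision master mypy/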
9 changes: 5 additions & 4 deletions mypy/build.py
@@ -383,6 +383,7 @@ def load_plugins_from_config(
plugins (for cache validation).
"""
import importlib

snapshot: Dict[str, str] = {}

if not options.config_file:
@@ -861,13 +862,13 @@ def deps_to_json(x: Dict[str, Set[str]]) -> str:


# File for storing metadata about all the fine-grained dependency caches
DEPS_META_FILE: Final = '@deps.meta.json'
DEPS_META_FILE: Final = "@deps.meta.json"
# File for storing fine-grained dependencies that didn't have a parent in the build
DEPS_ROOT_FILE: Final = '@root.deps.json'
DEPS_ROOT_FILE: Final = "@root.deps.json"

# The name of the fake module used to store fine-grained dependencies that
# have no other place to go.
FAKE_ROOT_MODULE: Final = '@root'
FAKE_ROOT_MODULE: Final = "@root"


def write_deps_cache(rdeps: Dict[str, Dict[str, Set[str]]],
@@ -990,7 +991,7 @@ def generate_deps_for_cache(manager: BuildManager,
return rdeps


PLUGIN_SNAPSHOT_FILE: Final = '@plugins_snapshot.json'
PLUGIN_SNAPSHOT_FILE: Final = "@plugins_snapshot.json"


def write_plugins_snapshot(manager: BuildManager) -> None:
19 changes: 10 additions & 9 deletions mypy/checker.py
@@ -1827,10 +1827,14 @@ def check_protocol_variance(self, defn: ClassDef) -> None:
object_type = Instance(info.mro[-1], [])
tvars = info.defn.type_vars
for i, tvar in enumerate(tvars):
up_args: List[Type] = [object_type if i == j else AnyType(TypeOfAny.special_form)
for j, _ in enumerate(tvars)]
down_args: List[Type] = [UninhabitedType() if i == j else AnyType(TypeOfAny.special_form)
for j, _ in enumerate(tvars)]
up_args: List[Type] = [
object_type if i == j else AnyType(TypeOfAny.special_form)
for j, _ in enumerate(tvars)
]
down_args: List[Type] = [
UninhabitedType() if i == j else AnyType(TypeOfAny.special_form)
for j, _ in enumerate(tvars)
]
up, down = Instance(info, up_args), Instance(info, down_args)
# TODO: add advanced variance checks for recursive protocols
if is_subtype(down, up, ignore_declared_variance=True):
@@ -2629,8 +2633,7 @@ def check_multi_assignment_from_union(self, lvalues: List[Expression], rvalue: E
for binder.
"""
self.no_partial_types = True
transposed: Tuple[List[Type], ...] = tuple([] for _ in
self.flatten_lvalues(lvalues))
transposed: Tuple[List[Type], ...] = tuple([] for _ in self.flatten_lvalues(lvalues))
# Notify binder that we want to defer bindings and instead collect types.
with self.binder.accumulate_type_assignments() as assignments:
for item in rvalue_type.items:
@@ -5809,9 +5812,7 @@ def group_comparison_operands(pairwise_comparisons: Iterable[Tuple[str, Expressi
This function is currently only used to assist with type-narrowing refinements
and is extracted out to a helper function so we can unit test it.
"""
groups: Dict[str, DisjointDict[Key, int]] = {
op: DisjointDict() for op in operators_to_group
}
groups: Dict[str, DisjointDict[Key, int]] = {op: DisjointDict() for op in operators_to_group}

simplified_operator_list: List[Tuple[str, List[int]]] = []
last_operator: Optional[str] = None
33 changes: 20 additions & 13 deletions mypy/checkexpr.py
@@ -93,8 +93,12 @@

# Types considered safe for comparisons with --strict-equality due to known behaviour of __eq__.
# NOTE: All these types are subtypes of AbstractSet.
OVERLAPPING_TYPES_WHITELIST: Final = ['builtins.set', 'builtins.frozenset',
'typing.KeysView', 'typing.ItemsView']
OVERLAPPING_TYPES_WHITELIST: Final = [
"builtins.set",
"builtins.frozenset",
"typing.KeysView",
"typing.ItemsView",
]


class TooManyUnions(Exception):
@@ -590,14 +594,16 @@ def get_partial_self_var(self, expr: MemberExpr) -> Optional[Var]:
return None

# Types and methods that can be used to infer partial types.
item_args: ClassVar[Dict[str, List[str]]] = {'builtins.list': ['append'],
'builtins.set': ['add', 'discard'],
}
container_args: ClassVar[Dict[str, Dict[str, List[str]]]] = {'builtins.list': {'extend': ['builtins.list']},
'builtins.dict': {'update': ['builtins.dict']},
'collections.OrderedDict': {'update': ['builtins.dict']},
'builtins.set': {'update': ['builtins.set', 'builtins.list']},
}
item_args: ClassVar[Dict[str, List[str]]] = {
"builtins.list": ["append"],
"builtins.set": ["add", "discard"],
}
container_args: ClassVar[Dict[str, Dict[str, List[str]]]] = {
"builtins.list": {"extend": ["builtins.list"]},
"builtins.dict": {"update": ["builtins.dict"]},
"collections.OrderedDict": {"update": ["builtins.dict"]},
"builtins.set": {"update": ["builtins.set", "builtins.list"]},
}

def try_infer_partial_type(self, e: CallExpr) -> None:
"""Try to make partial type precise from a call."""
@@ -3051,9 +3057,10 @@ def visit_typeddict_index_expr(self, td_type: TypedDictType, index: Expression)
value_types.append(value_type)
return make_simplified_union(value_types)

def visit_enum_index_expr(self, enum_type: TypeInfo, index: Expression,
context: Context) -> Type:
string_type: Type = self.named_type('builtins.str')
def visit_enum_index_expr(
self, enum_type: TypeInfo, index: Expression, context: Context
) -> Type:
string_type: Type = self.named_type("builtins.str")
if self.chk.options.python_version[0] < 3:
string_type = UnionType.make_union([string_type,
self.named_type('builtins.unicode')])
40 changes: 27 additions & 13 deletions mypy/checkstrformat.py
@@ -95,24 +95,37 @@ def compile_new_format_re(custom_spec: bool) -> Pattern[str]:
FORMAT_RE: Final = compile_format_re()
FORMAT_RE_NEW: Final = compile_new_format_re(False)
FORMAT_RE_NEW_CUSTOM: Final = compile_new_format_re(True)
DUMMY_FIELD_NAME: Final = '__dummy_name__'
DUMMY_FIELD_NAME: Final = "__dummy_name__"

# Format types supported by str.format() for builtin classes.
SUPPORTED_TYPES_NEW: Final = {'b', 'c', 'd', 'e', 'E', 'f', 'F',
'g', 'G', 'n', 'o', 's', 'x', 'X', '%'}
SUPPORTED_TYPES_NEW: Final = {
"b",
"c",
"d",
"e",
"E",
"f",
"F",
"g",
"G",
"n",
"o",
"s",
"x",
"X",
"%",
}

# Types that require either int or float.
NUMERIC_TYPES_OLD: Final = {'d', 'i', 'o', 'u', 'x', 'X',
'e', 'E', 'f', 'F', 'g', 'G'}
NUMERIC_TYPES_NEW: Final = {'b', 'd', 'o', 'e', 'E', 'f', 'F',
'g', 'G', 'n', 'x', 'X', '%'}
NUMERIC_TYPES_OLD: Final = {"d", "i", "o", "u", "x", "X", "e", "E", "f", "F", "g", "G"}
NUMERIC_TYPES_NEW: Final = {"b", "d", "o", "e", "E", "f", "F", "g", "G", "n", "x", "X", "%"}

# These types accept _only_ int.
REQUIRE_INT_OLD: Final = {'o', 'x', 'X'}
REQUIRE_INT_NEW: Final = {'b', 'd', 'o', 'x', 'X'}
REQUIRE_INT_OLD: Final = {"o", "x", "X"}
REQUIRE_INT_NEW: Final = {"b", "d", "o", "x", "X"}

# These types fall back to SupportsFloat with % (other fall back to SupportsInt)
FLOAT_TYPES: Final = {'e', 'E', 'f', 'F', 'g', 'G'}
FLOAT_TYPES: Final = {"e", "E", "f", "F", "g", "G"}


class ConversionSpecifier:
@@ -518,9 +531,10 @@ def apply_field_accessors(self, spec: ConversionSpecifier, repl: Expression,

# This is a bit of a dirty trick, but it looks like this is the simplest way.
temp_errors = self.msg.clean_copy().errors
dummy = DUMMY_FIELD_NAME + spec.field[len(spec.key):]
temp_ast: Node = parse(dummy, fnam='<format>', module=None,
options=self.chk.options, errors=temp_errors)
dummy = DUMMY_FIELD_NAME + spec.field[len(spec.key) :]
temp_ast: Node = parse(
dummy, fnam="<format>", module=None, options=self.chk.options, errors=temp_errors
)
if temp_errors.is_errors():
self.msg.fail('Syntax error in format specifier "{}"'.format(spec.field),
ctx, code=codes.STRING_FORMATTING)
50 changes: 30 additions & 20 deletions mypy/defaults.py
@@ -5,31 +5,41 @@
PYTHON2_VERSION: Final = (2, 7)
PYTHON3_VERSION: Final = (3, 6)
PYTHON3_VERSION_MIN: Final = (3, 4)
CACHE_DIR: Final = '.mypy_cache'
CONFIG_FILE: Final = ['mypy.ini', '.mypy.ini']
PYPROJECT_CONFIG_FILES: Final = ['pyproject.toml', ]
SHARED_CONFIG_FILES: Final = ['setup.cfg', ]
USER_CONFIG_FILES: Final = ['~/.config/mypy/config', '~/.mypy.ini', ]
if os.environ.get('XDG_CONFIG_HOME'):
USER_CONFIG_FILES.insert(0, os.path.join(os.environ['XDG_CONFIG_HOME'], 'mypy/config'))
CACHE_DIR: Final = ".mypy_cache"
CONFIG_FILE: Final = ["mypy.ini", ".mypy.ini"]
PYPROJECT_CONFIG_FILES: Final = [
"pyproject.toml",
]
SHARED_CONFIG_FILES: Final = [
"setup.cfg",
]
USER_CONFIG_FILES: Final = [
"~/.config/mypy/config",
"~/.mypy.ini",
]
if os.environ.get("XDG_CONFIG_HOME"):
USER_CONFIG_FILES.insert(0, os.path.join(os.environ["XDG_CONFIG_HOME"], "mypy/config"))

CONFIG_FILES: Final = (CONFIG_FILE + PYPROJECT_CONFIG_FILES + SHARED_CONFIG_FILES +
USER_CONFIG_FILES)
CONFIG_FILES: Final = (
CONFIG_FILE + PYPROJECT_CONFIG_FILES + SHARED_CONFIG_FILES + USER_CONFIG_FILES
)

# This must include all reporters defined in mypy.report. This is defined here
# to make reporter names available without importing mypy.report -- this speeds
# up startup.
REPORTER_NAMES: Final = ['linecount',
'any-exprs',
'linecoverage',
'memory-xml',
'cobertura-xml',
'xml',
'xslt-html',
'xslt-txt',
'html',
'txt',
'lineprecision']
REPORTER_NAMES: Final = [
"linecount",
"any-exprs",
"linecoverage",
"memory-xml",
"cobertura-xml",
"xml",
"xslt-html",
"xslt-txt",
"html",
"txt",
"lineprecision",
]

# Threshold after which we sometimes filter out most errors to avoid very
# verbose output
4 changes: 2 additions & 2 deletions mypy/dmypy_server.py
@@ -127,7 +127,7 @@ def daemonize(options: Options,

# Server code.

CONNECTION_NAME: Final = 'dmypy'
CONNECTION_NAME: Final = "dmypy"


def process_start_options(flags: List[str], allow_sources: bool) -> Options:
@@ -868,7 +868,7 @@ def cmd_hang(self) -> Dict[str, object]:
# Misc utilities.


MiB: Final = 2**20
MiB: Final = 2 ** 20


def get_meminfo() -> Dict[str, Any]:
2 changes: 1 addition & 1 deletion mypy/dmypy_util.py
@@ -10,7 +10,7 @@

from mypy.ipc import IPCBase

DEFAULT_STATUS_FILE: Final = '.dmypy.json'
DEFAULT_STATUS_FILE: Final = ".dmypy.json"


def receive(connection: IPCBase) -> Any: