Dataset schema (each record below repeats these fields in this order):

  code       string  (length 0 to 390k)
  repo_name  string  (1 distinct value)
  path       string  (length 12 to 69)
  language   string  (1 distinct value)
  license    string  (1 distinct value)
  size       int64   (0 to 390k)
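The records below follow this schema. As a minimal sketch of consuming such a dump, assuming it has been exported to a local JSON-Lines file (the filename here is hypothetical):

import json

# Hypothetical local export of this dump: one JSON object per line,
# with exactly the fields listed in the schema above.
with open("puya_vendored_mypy_tests.jsonl", encoding="utf-8") as f:
    for line in f:
        record = json.loads(line)
        print(record["path"], record["language"], record["size"])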
from __future__ import annotations

import os.path

provided_prefix = os.getenv("MYPY_TEST_PREFIX", None)
if provided_prefix:
    PREFIX = provided_prefix
else:
    this_file_dir = os.path.dirname(os.path.realpath(__file__))
    PREFIX = os.path.dirname(os.path.dirname(this_file_dir))

# Location of test data files such as test case descriptions.
test_data_prefix = os.path.join(PREFIX, "test-data", "unit")
package_path = os.path.join(PREFIX, "test-data", "packages")

# Temp directory used for the temp files created when running test cases.
# This is *within* the tempfile.TemporaryDirectory that is chroot'ed per testcase.
# It is also hard-coded in numerous places, so don't change it.
test_temp_dir = "tmp"

# The PEP 561 tests do a bunch of pip installs which, even though they operate
# on distinct temporary virtual environments, run into race conditions on shared
# file-system state. To make this work reliably in parallel mode, we'll use a
# FileLock courtesy of the tox-dev/py-filelock package.
# Ref. https://github.com/python/mypy/issues/12615
# Ref. mypy/test/testpep561.py
pip_lock = os.path.join(package_path, ".pip_lock")
pip_timeout = 60
repo_name: algorandfoundation/puya
path: src/puyapy/_vendor/mypy/test/config.py
language: Python
license: NOASSERTION
size: 1,160
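Since PREFIX is resolved at import time in config.py above, overriding the test root has to happen before the module is imported. A small usage sketch (the directory path is hypothetical; any checkout containing test-data/unit works):

import os

os.environ["MYPY_TEST_PREFIX"] = "/tmp/mypy-src"  # hypothetical checkout location

# Import only after setting the variable: PREFIX is computed at import time.
from mypy.test.config import PREFIX, test_data_prefix

print(PREFIX)            # /tmp/mypy-src
print(test_data_prefix)  # /tmp/mypy-src/test-data/unit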
"""Utilities for processing .test files containing test case descriptions.""" from __future__ import annotations import os import os.path import posixpath import re import shutil import sys import tempfile from abc import abstractmethod from dataclasses import dataclass from pathlib import Path from typing import Any, Final, Iterator, NamedTuple, NoReturn, Pattern, Union from typing_extensions import TypeAlias as _TypeAlias import pytest from mypy import defaults from mypy.test.config import PREFIX, test_data_prefix, test_temp_dir root_dir = os.path.normpath(PREFIX) # Debuggers that we support for debugging mypyc run tests # implementation of using each of these debuggers is in test_run.py # TODO: support more debuggers SUPPORTED_DEBUGGERS: Final = ["gdb", "lldb"] # File modify/create operation: copy module contents from source_path. class UpdateFile(NamedTuple): module: str content: str target_path: str # File delete operation: delete module file. class DeleteFile(NamedTuple): module: str path: str FileOperation: _TypeAlias = Union[UpdateFile, DeleteFile] def _file_arg_to_module(filename: str) -> str: filename, _ = os.path.splitext(filename) parts = filename.split("/") # not os.sep since it comes from test data if parts[-1] == "__init__": parts.pop() return ".".join(parts) def parse_test_case(case: DataDrivenTestCase) -> None: """Parse and prepare a single case from suite with test case descriptions. This method is part of the setup phase, just before the test case is run. """ test_items = parse_test_data(case.data, case.name) base_path = case.suite.base_path if case.suite.native_sep: join = os.path.join else: join = posixpath.join out_section_missing = case.suite.required_out_section files: list[tuple[str, str]] = [] # path and contents output_files: list[tuple[str, str | Pattern[str]]] = [] # output path and contents output: list[str] = [] # Regular output errors output2: dict[int, list[str]] = {} # Output errors for incremental, runs 2+ deleted_paths: dict[int, set[str]] = {} # from run number of paths stale_modules: dict[int, set[str]] = {} # from run number to module names rechecked_modules: dict[int, set[str]] = {} # from run number module names triggered: list[str] = [] # Active triggers (one line per incremental step) targets: dict[int, list[str]] = {} # Fine-grained targets (per fine-grained update) test_modules: list[str] = [] # Modules which are deemed "test" (vs "fixture") def _case_fail(msg: str) -> NoReturn: pytest.fail(f"{case.file}:{case.line}: {msg}", pytrace=False) # Process the parsed items. Each item has a header of form [id args], # optionally followed by lines of text. item = first_item = test_items[0] test_modules.append("__main__") for item in test_items[1:]: def _item_fail(msg: str) -> NoReturn: item_abs_line = case.line + item.line - 2 pytest.fail(f"{case.file}:{item_abs_line}: {msg}", pytrace=False) if item.id in {"file", "fixture", "outfile", "outfile-re"}: # Record an extra file needed for the test case. assert item.arg is not None contents = expand_variables("\n".join(item.data)) path = join(base_path, item.arg) if item.id != "fixture": test_modules.append(_file_arg_to_module(item.arg)) if item.id in {"file", "fixture"}: files.append((path, contents)) elif item.id == "outfile-re": output_files.append((path, re.compile(contents.rstrip(), re.S))) elif item.id == "outfile": output_files.append((path, contents)) elif item.id == "builtins": # Use an alternative stub file for the builtins module. 
assert item.arg is not None mpath = join(os.path.dirname(case.file), item.arg) with open(mpath, encoding="utf8") as f: files.append((join(base_path, "builtins.pyi"), f.read())) elif item.id == "typing": # Use an alternative stub file for the typing module. assert item.arg is not None src_path = join(os.path.dirname(case.file), item.arg) with open(src_path, encoding="utf8") as f: files.append((join(base_path, "typing.pyi"), f.read())) elif item.id == "_typeshed": # Use an alternative stub file for the _typeshed module. assert item.arg is not None src_path = join(os.path.dirname(case.file), item.arg) with open(src_path, encoding="utf8") as f: files.append((join(base_path, "_typeshed.pyi"), f.read())) elif re.match(r"stale[0-9]*$", item.id): passnum = 1 if item.id == "stale" else int(item.id[len("stale") :]) assert passnum > 0 modules = set() if item.arg is None else {t.strip() for t in item.arg.split(",")} stale_modules[passnum] = modules elif re.match(r"rechecked[0-9]*$", item.id): passnum = 1 if item.id == "rechecked" else int(item.id[len("rechecked") :]) assert passnum > 0 modules = set() if item.arg is None else {t.strip() for t in item.arg.split(",")} rechecked_modules[passnum] = modules elif re.match(r"targets[0-9]*$", item.id): passnum = 1 if item.id == "targets" else int(item.id[len("targets") :]) assert passnum > 0 reprocessed = [] if item.arg is None else [t.strip() for t in item.arg.split(",")] targets[passnum] = reprocessed elif item.id == "delete": # File/directory to delete during a multi-step test case assert item.arg is not None m = re.match(r"(.*)\.([0-9]+)$", item.arg) if m is None: _item_fail(f"Invalid delete section {item.arg!r}") num = int(m.group(2)) if num < 2: _item_fail(f"Can't delete during step {num}") full = join(base_path, m.group(1)) deleted_paths.setdefault(num, set()).add(full) elif re.match(r"out[0-9]*$", item.id): if item.arg is None: args = [] else: args = item.arg.split(",") version_check = True for arg in args: if arg.startswith("version"): compare_op = arg[7:9] if compare_op not in {">=", "=="}: _item_fail("Only >= and == version checks are currently supported") version_str = arg[9:] try: version = tuple(int(x) for x in version_str.split(".")) except ValueError: _item_fail(f"{version_str!r} is not a valid python version") if compare_op == ">=": if version <= defaults.PYTHON3_VERSION: _item_fail( f"{arg} always true since minimum runtime version is {defaults.PYTHON3_VERSION}" ) version_check = sys.version_info >= version elif compare_op == "==": if version < defaults.PYTHON3_VERSION: _item_fail( f"{arg} always false since minimum runtime version is {defaults.PYTHON3_VERSION}" ) if not 1 < len(version) < 4: _item_fail( f'Only minor or patch version checks are currently supported with "==": {version_str!r}' ) version_check = sys.version_info[: len(version)] == version if version_check: tmp_output = [expand_variables(line) for line in item.data] if os.path.sep == "\\" and case.normalize_output: tmp_output = [fix_win_path(line) for line in tmp_output] if item.id == "out" or item.id == "out1": output = tmp_output else: passnum = int(item.id[len("out") :]) assert passnum > 1 output2[passnum] = tmp_output out_section_missing = False elif item.id == "triggered" and item.arg is None: triggered = item.data else: section_str = item.id + (f" {item.arg}" if item.arg else "") _item_fail(f"Invalid section header [{section_str}] in case {case.name!r}") if out_section_missing: _case_fail(f"Required output section not found in case {case.name!r}") for passnum in 
stale_modules.keys(): if passnum not in rechecked_modules: # If the set of rechecked modules isn't specified, make it the same as the set # of modules with a stale public interface. rechecked_modules[passnum] = stale_modules[passnum] if ( passnum in stale_modules and passnum in rechecked_modules and not stale_modules[passnum].issubset(rechecked_modules[passnum]) ): _case_fail(f"Stale modules after pass {passnum} must be a subset of rechecked modules") output_inline_start = len(output) input = first_item.data expand_errors(input, output, "main") for file_path, contents in files: expand_errors(contents.split("\n"), output, file_path) seen_files = set() for file, _ in files: if file in seen_files: _case_fail(f"Duplicated filename {file}. Did you include it multiple times?") seen_files.add(file) case.input = input case.output = output case.output_inline_start = output_inline_start case.output2 = output2 case.last_line = case.line + item.line + len(item.data) - 2 case.files = files case.output_files = output_files case.expected_stale_modules = stale_modules case.expected_rechecked_modules = rechecked_modules case.deleted_paths = deleted_paths case.triggered = triggered or [] case.expected_fine_grained_targets = targets case.test_modules = test_modules class DataDrivenTestCase(pytest.Item): """Holds parsed data-driven test cases, and handles directory setup and teardown.""" # Override parent member type parent: DataSuiteCollector input: list[str] output: list[str] # Output for the first pass output_inline_start: int output2: dict[int, list[str]] # Output for runs 2+, indexed by run number # full path of test suite file = "" line = 0 # (file path, file content) tuples files: list[tuple[str, str]] # Modules which is to be considered "test" rather than "fixture" test_modules: list[str] expected_stale_modules: dict[int, set[str]] expected_rechecked_modules: dict[int, set[str]] expected_fine_grained_targets: dict[int, list[str]] # Whether or not we should normalize the output to standardize things like # forward vs backward slashes in file paths for Windows vs Linux. normalize_output: bool # Extra attributes used by some tests. 
last_line: int output_files: list[tuple[str, str | Pattern[str]]] # Path and contents for output files deleted_paths: dict[int, set[str]] # Mapping run number -> paths triggered: list[str] # Active triggers (one line per incremental step) def __init__( self, parent: DataSuiteCollector, suite: DataSuite, *, file: str, name: str, writescache: bool, only_when: str, normalize_output: bool, platform: str | None, skip: bool, xfail: bool, data: str, line: int, ) -> None: super().__init__(name, parent) self.suite = suite self.file = file self.writescache = writescache self.only_when = only_when self.normalize_output = normalize_output if (platform == "windows" and sys.platform != "win32") or ( platform == "posix" and sys.platform == "win32" ): skip = True self.skip = skip self.xfail = xfail self.data = data self.line = line self.old_cwd: str | None = None self.tmpdir: tempfile.TemporaryDirectory[str] | None = None def runtest(self) -> None: if self.skip: pytest.skip() # TODO: add a better error message for when someone uses skip and xfail at the same time elif self.xfail: self.add_marker(pytest.mark.xfail) parent = self.getparent(DataSuiteCollector) assert parent is not None, "Should not happen" suite = parent.obj() suite.setup() try: suite.run_case(self) except Exception: # As a debugging aid, support copying the contents of the tmp directory somewhere save_dir: str | None = self.config.getoption("--save-failures-to", None) if save_dir: assert self.tmpdir is not None target_dir = os.path.join(save_dir, os.path.basename(self.tmpdir.name)) print(f"Copying data from test {self.name} to {target_dir}") if not os.path.isabs(target_dir): assert self.old_cwd target_dir = os.path.join(self.old_cwd, target_dir) shutil.copytree(self.tmpdir.name, target_dir) raise def setup(self) -> None: parse_test_case(case=self) self.old_cwd = os.getcwd() self.tmpdir = tempfile.TemporaryDirectory(prefix="mypy-test-") os.chdir(self.tmpdir.name) os.mkdir(test_temp_dir) # Precalculate steps for find_steps() steps: dict[int, list[FileOperation]] = {} for path, content in self.files: m = re.match(r".*\.([0-9]+)$", path) if m: # Skip writing subsequent incremental steps - rather # store them as operations. num = int(m.group(1)) assert num >= 2 target_path = re.sub(r"\.[0-9]+$", "", path) module = module_from_path(target_path) operation = UpdateFile(module, content, target_path) steps.setdefault(num, []).append(operation) else: # Write the first incremental steps dir = os.path.dirname(path) os.makedirs(dir, exist_ok=True) with open(path, "w", encoding="utf8") as f: f.write(content) for num, paths in self.deleted_paths.items(): assert num >= 2 for path in paths: module = module_from_path(path) steps.setdefault(num, []).append(DeleteFile(module, path)) max_step = max(steps) if steps else 2 self.steps = [steps.get(num, []) for num in range(2, max_step + 1)] def teardown(self) -> None: if self.old_cwd is not None: os.chdir(self.old_cwd) if self.tmpdir is not None: try: self.tmpdir.cleanup() except OSError: pass self.old_cwd = None self.tmpdir = None def reportinfo(self) -> tuple[str, int, str]: return self.file, self.line, self.name def repr_failure( self, excinfo: pytest.ExceptionInfo[BaseException], style: Any | None = None ) -> str: excrepr: object if isinstance(excinfo.value, SystemExit): # We assume that before doing exit() (which raises SystemExit) we've printed # enough context about what happened so that a stack trace is not useful. 
# In particular, uncaught exceptions during semantic analysis or type checking # call exit() and they already print out a stack trace. excrepr = excinfo.exconly() elif isinstance(excinfo.value, pytest.fail.Exception) and not excinfo.value.pytrace: excrepr = excinfo.exconly() else: excinfo.traceback = self.parent._traceback_filter(excinfo) excrepr = excinfo.getrepr(style="short") return f"data: {self.file}:{self.line}:\n{excrepr}" def find_steps(self) -> list[list[FileOperation]]: """Return a list of descriptions of file operations for each incremental step. The first list item corresponds to the first incremental step, the second for the second step, etc. Each operation can either be a file modification/creation (UpdateFile) or deletion (DeleteFile). Defaults to having two steps if there aern't any operations. """ return self.steps def module_from_path(path: str) -> str: path = re.sub(r"\.pyi?$", "", path) # We can have a mix of Unix-style and Windows-style separators. parts = re.split(r"[/\\]", path) del parts[0] module = ".".join(parts) module = re.sub(r"\.__init__$", "", module) return module @dataclass class TestItem: """Parsed test caseitem. An item is of the form [id arg] .. data .. """ id: str arg: str | None # Processed, collapsed text data data: list[str] # Start line: 1-based, inclusive, relative to testcase line: int # End line: 1-based, exclusive, relative to testcase; not same as `line + len(test_item.data)` due to collapsing end_line: int @property def trimmed_newlines(self) -> int: # compensates for strip_list return self.end_line - self.line - len(self.data) def parse_test_data(raw_data: str, name: str) -> list[TestItem]: """Parse a list of lines that represent a sequence of test items.""" lines = ["", "[case " + name + "]"] + raw_data.split("\n") ret: list[TestItem] = [] data: list[str] = [] id: str | None = None arg: str | None = None i = 0 i0 = 0 while i < len(lines): s = lines[i].strip() if lines[i].startswith("[") and s.endswith("]"): if id: data = collapse_line_continuation(data) data = strip_list(data) ret.append(TestItem(id, arg, data, i0 + 1, i)) i0 = i id = s[1:-1] arg = None if " " in id: arg = id[id.index(" ") + 1 :] id = id[: id.index(" ")] data = [] elif lines[i].startswith("\\["): data.append(lines[i][1:]) elif not lines[i].startswith("--"): data.append(lines[i]) elif lines[i].startswith("----"): data.append(lines[i][2:]) i += 1 # Process the last item. if id: data = collapse_line_continuation(data) data = strip_list(data) ret.append(TestItem(id, arg, data, i0 + 1, i - 1)) return ret def strip_list(l: list[str]) -> list[str]: """Return a stripped copy of l. Strip whitespace at the end of all lines, and strip all empty lines from the end of the array. """ r: list[str] = [] for s in l: # Strip spaces at end of line r.append(re.sub(r"\s+$", "", s)) while r and r[-1] == "": r.pop() return r def collapse_line_continuation(l: list[str]) -> list[str]: r: list[str] = [] cont = False for s in l: ss = re.sub(r"\\$", "", s) if cont: r[-1] += re.sub("^ +", "", ss) else: r.append(ss) cont = s.endswith("\\") return r def expand_variables(s: str) -> str: return s.replace("<ROOT>", root_dir) def expand_errors(input: list[str], output: list[str], fnam: str) -> None: """Transform comments such as '# E: message' or '# E:3: message' in input. The result is lines like 'fnam:line: error: message'. """ for i in range(len(input)): # The first in the split things isn't a comment for possible_err_comment in input[i].split(" # ")[1:]: m = re.search( r"^([ENW]):((?P<col>\d+):)? 
(?P<message>.*)$", possible_err_comment.strip() ) if m: if m.group(1) == "E": severity = "error" elif m.group(1) == "N": severity = "note" elif m.group(1) == "W": severity = "warning" col = m.group("col") message = m.group("message") message = message.replace("\\#", "#") # adds back escaped # character if col is None: output.append(f"{fnam}:{i + 1}: {severity}: {message}") else: output.append(f"{fnam}:{i + 1}:{col}: {severity}: {message}") def fix_win_path(line: str) -> str: r"""Changes Windows paths to Linux paths in error messages. E.g. foo\bar.py -> foo/bar.py. """ line = line.replace(root_dir, root_dir.replace("\\", "/")) m = re.match(r"^([\S/]+):(\d+:)?(\s+.*)", line) if not m: return line else: filename, lineno, message = m.groups() return "{}:{}{}".format(filename.replace("\\", "/"), lineno or "", message) def fix_cobertura_filename(line: str) -> str: r"""Changes filename paths to Linux paths in Cobertura output files. E.g. filename="pkg\subpkg\a.py" -> filename="pkg/subpkg/a.py". """ m = re.search(r'<class .* filename="(?P<filename>.*?)"', line) if not m: return line return "{}{}{}".format( line[: m.start(1)], m.group("filename").replace("\\", "/"), line[m.end(1) :] ) ## # # pytest setup # ## # This function name is special to pytest. See # https://docs.pytest.org/en/latest/reference.html#initialization-hooks def pytest_addoption(parser: Any) -> None: group = parser.getgroup("mypy") group.addoption( "--update-data", action="store_true", default=False, help="Update test data to reflect actual output (supported only for certain tests)", ) group.addoption( "--save-failures-to", default=None, help="Copy the temp directories from failing tests to a target directory", ) group.addoption( "--mypy-verbose", action="count", help="Set the verbose flag when creating mypy Options" ) group.addoption( "--mypyc-showc", action="store_true", default=False, help="Display C code on mypyc test failures", ) group.addoption( "--mypyc-debug", default=None, dest="debugger", choices=SUPPORTED_DEBUGGERS, help="Run the first mypyc run test with the specified debugger", ) @pytest.hookimpl(tryfirst=True) def pytest_cmdline_main(config: pytest.Config) -> None: if config.getoption("--collectonly"): return # --update-data is not compatible with parallelized tests, disable parallelization if config.getoption("--update-data"): config.option.numprocesses = 0 # This function name is special to pytest. See # https://doc.pytest.org/en/latest/how-to/writing_plugins.html#collection-hooks def pytest_pycollect_makeitem(collector: Any, name: str, obj: object) -> Any | None: """Called by pytest on each object in modules configured in conftest.py files. collector is pytest.Collector, returns Optional[pytest.Class] """ if isinstance(obj, type): # Only classes derived from DataSuite contain test cases, not the DataSuite class itself if issubclass(obj, DataSuite) and obj is not DataSuite: # Non-None result means this obj is a test case. # The collect method of the returned DataSuiteCollector instance will be called later, # with self.obj being obj. return DataSuiteCollector.from_parent(parent=collector, name=name) return None _case_name_pattern = re.compile( r"(?P<name>[a-zA-Z_0-9]+)" r"(?P<writescache>-writescache)?" r"(?P<only_when>-only_when_cache|-only_when_nocache)?" r"(?P<skip_path_normalization>-skip_path_normalization)?" r"(-(?P<platform>posix|windows))?" r"(?P<skip>-skip)?" r"(?P<xfail>-xfail)?" 
) def split_test_cases( parent: DataFileCollector, suite: DataSuite, file: str ) -> Iterator[DataDrivenTestCase]: """Iterate over raw test cases in file, at collection time, ignoring sub items. The collection phase is slow, so any heavy processing should be deferred to after uninteresting tests are filtered (when using -k PATTERN switch). """ with open(file, encoding="utf-8") as f: data = f.read() cases = re.split(r"^\[case ([^]+)]+)\][ \t]*$\n", data, flags=re.DOTALL | re.MULTILINE) cases_iter = iter(cases) line_no = next(cases_iter).count("\n") + 1 test_names = set() for case_id in cases_iter: data = next(cases_iter) m = _case_name_pattern.fullmatch(case_id) if not m: raise RuntimeError(f"Invalid testcase id {case_id!r}") name = m.group("name") if name in test_names: raise RuntimeError( 'Found a duplicate test name "{}" in {} on line {}'.format( name, parent.name, line_no ) ) yield DataDrivenTestCase.from_parent( parent=parent, suite=suite, file=file, name=add_test_name_suffix(name, suite.test_name_suffix), writescache=bool(m.group("writescache")), only_when=m.group("only_when"), platform=m.group("platform"), skip=bool(m.group("skip")), xfail=bool(m.group("xfail")), normalize_output=not m.group("skip_path_normalization"), data=data, line=line_no, ) line_no += data.count("\n") + 1 # Record existing tests to prevent duplicates: test_names.update({name}) class DataSuiteCollector(pytest.Class): def collect(self) -> Iterator[DataFileCollector]: """Called by pytest on each of the object returned from pytest_pycollect_makeitem""" # obj is the object for which pytest_pycollect_makeitem returned self. suite: DataSuite = self.obj assert os.path.isdir( suite.data_prefix ), f"Test data prefix ({suite.data_prefix}) not set correctly" for data_file in suite.files: yield DataFileCollector.from_parent(parent=self, name=data_file) class DataFileFix(NamedTuple): lineno: int # 1-offset, inclusive end_lineno: int # 1-offset, exclusive lines: list[str] class DataFileCollector(pytest.Collector): """Represents a single `.test` data driven test file. More context: https://github.com/python/mypy/issues/11662 """ parent: DataSuiteCollector _fixes: list[DataFileFix] @classmethod # We have to fight with pytest here: def from_parent( cls, parent: DataSuiteCollector, *, name: str # type: ignore[override] ) -> DataFileCollector: collector = super().from_parent(parent, name=name) assert isinstance(collector, DataFileCollector) return collector def collect(self) -> Iterator[DataDrivenTestCase]: yield from split_test_cases( parent=self, suite=self.parent.obj, file=os.path.join(self.parent.obj.data_prefix, self.name), ) def setup(self) -> None: super().setup() self._fixes = [] def teardown(self) -> None: super().teardown() self._apply_fixes() def enqueue_fix(self, fix: DataFileFix) -> None: self._fixes.append(fix) def _apply_fixes(self) -> None: if not self._fixes: return data_path = Path(self.parent.obj.data_prefix) / self.name lines = data_path.read_text().split("\n") # start from end to prevent line offsets from shifting as we update for fix in sorted(self._fixes, reverse=True): lines[fix.lineno - 1 : fix.end_lineno - 1] = fix.lines data_path.write_text("\n".join(lines)) def add_test_name_suffix(name: str, suffix: str) -> str: # Find magic suffix of form "-foobar" (used for things like "-skip"). m = re.search(r"-[-A-Za-z0-9]+$", name) if m: # Insert suite-specific test name suffix before the magic suffix # which must be the last thing in the test case name since we # are using endswith() checks. 
magic_suffix = m.group(0) return name[: -len(magic_suffix)] + suffix + magic_suffix else: return name + suffix def is_incremental(testcase: DataDrivenTestCase) -> bool: return "incremental" in testcase.name.lower() or "incremental" in testcase.file def has_stable_flags(testcase: DataDrivenTestCase) -> bool: if any(re.match(r"# flags[2-9]:", line) for line in testcase.input): return False for filename, contents in testcase.files: if os.path.basename(filename).startswith("mypy.ini."): return False return True class DataSuite: # option fields - class variables files: list[str] base_path = test_temp_dir # Allow external users of the test code to override the data prefix data_prefix = test_data_prefix required_out_section = False native_sep = False # Name suffix automatically added to each test case in the suite (can be # used to distinguish test cases in suites that share data files) test_name_suffix = "" def setup(self) -> None: """Setup fixtures (ad-hoc)""" @abstractmethod def run_case(self, testcase: DataDrivenTestCase) -> None: raise NotImplementedError
repo_name: algorandfoundation/puya
path: src/puyapy/_vendor/mypy/test/data.py
language: Python
license: NOASSERTION
size: 29,951
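To make the [case]/[out] section format that data.py parses concrete, here is a small sketch of feeding a fragment of a .test case body to parse_test_data; the case name and the printed shape are illustrative:

from mypy.test.data import parse_test_data

raw = "s: str = 42\n[out]\nmain:1: error: ...\n"
for item in parse_test_data(raw, name="testExample"):
    # Expect a "case" item for the implicit header plus an "out" item.
    print(item.id, item.arg, item.data)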
from __future__ import annotations

import contextlib
import difflib
import os
import pathlib
import re
import shutil
import sys
import time
from typing import IO, Any, Callable, Iterable, Iterator, Pattern

# Exporting Suite as alias to TestCase for backwards compatibility
# TODO: avoid aliasing - import and subclass TestCase directly
from unittest import TestCase

Suite = TestCase  # re-exporting

import pytest

import mypy.api as api
import mypy.version
from mypy import defaults
from mypy.main import process_options
from mypy.options import Options
from mypy.test.config import test_data_prefix, test_temp_dir
from mypy.test.data import DataDrivenTestCase, DeleteFile, UpdateFile, fix_cobertura_filename

skip = pytest.mark.skip

# AssertStringArraysEqual displays special line alignment helper messages if
# the first different line has at least this many characters.
MIN_LINE_LENGTH_FOR_ALIGNMENT = 5


def run_mypy(args: list[str]) -> None:
    __tracebackhide__ = True
    # We must enable site packages even though they could cause problems,
    # since stubs for typing_extensions live there.
    outval, errval, status = api.run(args + ["--show-traceback", "--no-silence-site-packages"])
    if status != 0:
        sys.stdout.write(outval)
        sys.stderr.write(errval)
        pytest.fail(reason="Sample check failed", pytrace=False)


def diff_ranges(
    left: list[str], right: list[str]
) -> tuple[list[tuple[int, int]], list[tuple[int, int]]]:
    seq = difflib.SequenceMatcher(None, left, right)
    # note last triple is a dummy, so don't need to worry
    blocks = seq.get_matching_blocks()

    i = 0
    j = 0
    left_ranges = []
    right_ranges = []
    for block in blocks:
        # mismatched range
        left_ranges.append((i, block.a))
        right_ranges.append((j, block.b))

        i = block.a + block.size
        j = block.b + block.size

        # matched range
        left_ranges.append((block.a, i))
        right_ranges.append((block.b, j))
    return left_ranges, right_ranges


def render_diff_range(
    ranges: list[tuple[int, int]],
    content: list[str],
    *,
    colour: str | None = None,
    output: IO[str] = sys.stderr,
    indent: int = 2,
) -> None:
    for i, line_range in enumerate(ranges):
        is_matching = i % 2 == 1
        lines = content[line_range[0] : line_range[1]]
        for j, line in enumerate(lines):
            if (
                is_matching
                # elide the middle of matching blocks
                and j >= 3
                and j < len(lines) - 3
            ):
                if j == 3:
                    output.write(" " * indent + "...\n")
                continue

            if not is_matching and colour:
                output.write(colour)

            output.write(" " * indent + line)

            if not is_matching:
                if colour:
                    output.write("\033[0m")
                output.write(" (diff)")

            output.write("\n")


def assert_string_arrays_equal(
    expected: list[str], actual: list[str], msg: str, *, traceback: bool = False
) -> None:
    """Assert that two string arrays are equal.

    Display any differences in a human-readable form.
    """
    actual = clean_up(actual)
    if expected != actual:
        expected_ranges, actual_ranges = diff_ranges(expected, actual)
        sys.stderr.write("Expected:\n")
        red = "\033[31m" if sys.platform != "win32" else None
        render_diff_range(expected_ranges, expected, colour=red)
        sys.stderr.write("Actual:\n")
        green = "\033[32m" if sys.platform != "win32" else None
        render_diff_range(actual_ranges, actual, colour=green)

        sys.stderr.write("\n")
        first_diff = next(
            (i for i, (a, b) in enumerate(zip(expected, actual)) if a != b),
            max(len(expected), len(actual)),
        )
        if 0 <= first_diff < len(actual) and (
            len(expected[first_diff]) >= MIN_LINE_LENGTH_FOR_ALIGNMENT
            or len(actual[first_diff]) >= MIN_LINE_LENGTH_FOR_ALIGNMENT
        ):
            # Display message that helps visualize the differences between two
            # long lines.
            show_align_message(expected[first_diff], actual[first_diff])

        sys.stderr.write(
            "Update the test output using --update-data "
            "(implies -n0; you can additionally use the -k selector to update only specific tests)\n"
        )
        pytest.fail(msg, pytrace=traceback)


def assert_module_equivalence(name: str, expected: Iterable[str], actual: Iterable[str]) -> None:
    expected_normalized = sorted(expected)
    actual_normalized = sorted(set(actual).difference({"__main__"}))
    assert_string_arrays_equal(
        expected_normalized,
        actual_normalized,
        ('Actual modules ({}) do not match expected modules ({}) for "[{} ...]"').format(
            ", ".join(actual_normalized), ", ".join(expected_normalized), name
        ),
    )


def assert_target_equivalence(name: str, expected: list[str], actual: list[str]) -> None:
    """Compare actual and expected targets (order sensitive)."""
    assert_string_arrays_equal(
        expected,
        actual,
        ('Actual targets ({}) do not match expected targets ({}) for "[{} ...]"').format(
            ", ".join(actual), ", ".join(expected), name
        ),
    )


def show_align_message(s1: str, s2: str) -> None:
    """Align s1 and s2 so that their first difference is highlighted.

    For example, if s1 is 'foobar' and s2 is 'fobar', display the
    following lines:

      E: foobar
      A: fobar
           ^

    If s1 and s2 are long, only display a fragment of the strings around the
    first difference. If s1 is very short, do nothing.
    """

    # Seeing what went wrong is trivial even without alignment if the expected
    # string is very short. In this case do nothing to simplify output.
    if len(s1) < 4:
        return

    maxw = 72  # Maximum number of characters shown

    sys.stderr.write("Alignment of first line difference:\n")

    trunc = False
    while s1[:30] == s2[:30]:
        s1 = s1[10:]
        s2 = s2[10:]
        trunc = True

    if trunc:
        s1 = "..." + s1
        s2 = "..." + s2

    max_len = max(len(s1), len(s2))
    extra = ""
    if max_len > maxw:
        extra = "..."

    # Write a chunk of both lines, aligned.
    sys.stderr.write(f"  E: {s1[:maxw]}{extra}\n")
    sys.stderr.write(f"  A: {s2[:maxw]}{extra}\n")
    # Write an indicator character under the different columns.
    sys.stderr.write("     ")
    for j in range(min(maxw, max(len(s1), len(s2)))):
        if s1[j : j + 1] != s2[j : j + 1]:
            sys.stderr.write("^")  # Difference
            break
        else:
            sys.stderr.write(" ")  # Equal
    sys.stderr.write("\n")


def clean_up(a: list[str]) -> list[str]:
    """Remove common directory prefix from all strings in a.

    This uses a naive string replace; it seems to work well enough. Also
    remove trailing carriage returns.
    """
    res = []
    pwd = os.getcwd()
    driver = pwd + "/driver.py"
    for s in a:
        prefix = os.sep
        ss = s
        for p in prefix, prefix.replace(os.sep, "/"):
            if p != "/" and p != "//" and p != "\\" and p != "\\\\":
                ss = ss.replace(p, "")
        # Ignore spaces at end of line.
        ss = re.sub(" +$", "", ss)
        # Remove pwd from driver.py's path
        ss = ss.replace(driver, "driver.py")
        res.append(re.sub("\\r$", "", ss))
    return res


@contextlib.contextmanager
def local_sys_path_set() -> Iterator[None]:
    """Temporarily insert current directory into sys.path.

    This can be used by test cases that do runtime imports, for example
    by the stubgen tests.
    """
    old_sys_path = sys.path.copy()
    if not ("" in sys.path or "." in sys.path):
        sys.path.insert(0, "")
    try:
        yield
    finally:
        sys.path = old_sys_path


def testfile_pyversion(path: str) -> tuple[int, int]:
    m = re.search(r"python3([0-9]+)\.test$", path)
    if m:
        return 3, int(m.group(1))
    else:
        return defaults.PYTHON3_VERSION


def normalize_error_messages(messages: list[str]) -> list[str]:
    """Translate an array of error messages to use / as path separator."""
    a = []
    for m in messages:
        a.append(m.replace(os.sep, "/"))
    return a


def retry_on_error(func: Callable[[], Any], max_wait: float = 1.0) -> None:
    """Retry callback with exponential backoff when it raises OSError.

    If the function still generates an error after max_wait seconds, propagate
    the exception.

    This can be effective against random file system operation failures on
    Windows.
    """
    t0 = time.time()
    wait_time = 0.01
    while True:
        try:
            func()
            return
        except OSError:
            wait_time = min(wait_time * 2, t0 + max_wait - time.time())
            if wait_time <= 0.01:
                # Done enough waiting, the error seems persistent.
                raise
            time.sleep(wait_time)


def good_repr(obj: object) -> str:
    if isinstance(obj, str):
        if obj.count("\n") > 1:
            bits = ["'''\\"]
            for line in obj.split("\n"):
                # force repr to use ' not ", then cut it off
                bits.append(repr('"' + line)[2:-1])
            bits[-1] += "'''"
            return "\n".join(bits)
    return repr(obj)


def assert_equal(a: object, b: object, fmt: str = "{} != {}") -> None:
    __tracebackhide__ = True
    if a != b:
        raise AssertionError(fmt.format(good_repr(a), good_repr(b)))


def typename(t: type) -> str:
    if "." in str(t):
        return str(t).split(".")[-1].rstrip("'>")
    else:
        return str(t)[8:-2]


def assert_type(typ: type, value: object) -> None:
    __tracebackhide__ = True
    if type(value) != typ:
        raise AssertionError(f"Invalid type {typename(type(value))}, expected {typename(typ)}")


def parse_options(
    program_text: str, testcase: DataDrivenTestCase, incremental_step: int
) -> Options:
    """Parse comments like '# flags: --foo' in a test case."""
    options = Options()
    flags = re.search("# flags: (.*)$", program_text, flags=re.MULTILINE)
    if incremental_step > 1:
        flags2 = re.search(f"# flags{incremental_step}: (.*)$", program_text, flags=re.MULTILINE)
        if flags2:
            flags = flags2

    if flags:
        flag_list = flags.group(1).split()
        flag_list.append("--no-site-packages")  # the tests shouldn't need an installed Python
        targets, options = process_options(flag_list, require_targets=False)
        if targets:
            # TODO: support specifying targets via the flags pragma
            raise RuntimeError("Specifying targets via the flags pragma is not supported.")
        if "--show-error-codes" not in flag_list:
            options.hide_error_codes = True
    else:
        flag_list = []
        options = Options()
        options.error_summary = False
        options.hide_error_codes = True
        options.force_uppercase_builtins = True
        options.force_union_syntax = True

    # Allow custom python version to override testfile_pyversion.
    if all(flag.split("=")[0] != "--python-version" for flag in flag_list):
        options.python_version = testfile_pyversion(testcase.file)

    if testcase.config.getoption("--mypy-verbose"):
        options.verbosity = testcase.config.getoption("--mypy-verbose")

    return options


def split_lines(*streams: bytes) -> list[str]:
    """Returns a single list of string lines from the byte streams in args."""
    return [s for stream in streams for s in stream.decode("utf8").splitlines()]


def write_and_fudge_mtime(content: str, target_path: str) -> None:
    # In some systems, mtime has a resolution of 1 second which can
    # cause annoying-to-debug issues when a file has the same size
    # after a change. We manually set the mtime to circumvent this.
    # Note that we increment the old file's mtime, which guarantees a
    # different value, rather than incrementing the mtime after the
    # copy, which could leave the mtime unchanged if the old file had
    # a similarly fudged mtime.
    new_time = None
    if os.path.isfile(target_path):
        new_time = os.stat(target_path).st_mtime + 1

    dir = os.path.dirname(target_path)
    os.makedirs(dir, exist_ok=True)
    with open(target_path, "w", encoding="utf-8") as target:
        target.write(content)

    if new_time:
        os.utime(target_path, times=(new_time, new_time))


def perform_file_operations(operations: list[UpdateFile | DeleteFile]) -> None:
    for op in operations:
        if isinstance(op, UpdateFile):
            # Modify/create file
            write_and_fudge_mtime(op.content, op.target_path)
        else:
            # Delete file/directory
            if os.path.isdir(op.path):
                # Sanity check to avoid unexpected deletions
                assert op.path.startswith("tmp")
                shutil.rmtree(op.path)
            else:
                # Use retries to work around potential flakiness on Windows (AppVeyor).
                path = op.path
                retry_on_error(lambda: os.remove(path))


def check_test_output_files(
    testcase: DataDrivenTestCase, step: int, strip_prefix: str = ""
) -> None:
    for path, expected_content in testcase.output_files:
        if path.startswith(strip_prefix):
            path = path[len(strip_prefix) :]
        if not os.path.exists(path):
            raise AssertionError(
                "Expected file {} was not produced by test case{}".format(
                    path, " on step %d" % step if testcase.output2 else ""
                )
            )
        with open(path, encoding="utf8") as output_file:
            actual_output_content = output_file.read()

        if isinstance(expected_content, Pattern):
            if expected_content.fullmatch(actual_output_content) is not None:
                continue
            raise AssertionError(
                "Output file {} did not match its expected output pattern\n---\n{}\n---".format(
                    path, actual_output_content
                )
            )

        normalized_output = normalize_file_output(
            actual_output_content.splitlines(), os.path.abspath(test_temp_dir)
        )
        # We always normalize things like timestamp, but only handle operating-system
        # specific things if requested.
        if testcase.normalize_output:
            if testcase.suite.native_sep and os.path.sep == "\\":
                normalized_output = [fix_cobertura_filename(line) for line in normalized_output]
            normalized_output = normalize_error_messages(normalized_output)
        assert_string_arrays_equal(
            expected_content.splitlines(),
            normalized_output,
            "Output file {} did not match its expected output{}".format(
                path, " on step %d" % step if testcase.output2 else ""
            ),
        )


def normalize_file_output(content: list[str], current_abs_path: str) -> list[str]:
    """Normalize file output for comparison."""
    timestamp_regex = re.compile(r"\d{10}")
    result = [x.replace(current_abs_path, "$PWD") for x in content]
    version = mypy.version.__version__
    result = [re.sub(r"\b" + re.escape(version) + r"\b", "$VERSION", x) for x in result]
    # We generate a new mypy.version when building mypy wheels that
    # lacks base_version, so handle that case.
    base_version = getattr(mypy.version, "base_version", version)
    result = [re.sub(r"\b" + re.escape(base_version) + r"\b", "$VERSION", x) for x in result]
    result = [timestamp_regex.sub("$TIMESTAMP", x) for x in result]
    return result


def find_test_files(pattern: str, exclude: list[str] | None = None) -> list[str]:
    return [
        path.name
        for path in (pathlib.Path(test_data_prefix).rglob(pattern))
        if path.name not in (exclude or [])
    ]
repo_name: algorandfoundation/puya
path: src/puyapy/_vendor/mypy/test/helpers.py
language: Python
license: NOASSERTION
size: 16,067
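Of the helpers above, retry_on_error is the one whose behaviour is easiest to misread: it keeps retrying the callback on OSError until roughly max_wait seconds have elapsed, then re-raises. A usage sketch (the file path is hypothetical):

import os

from mypy.test.helpers import retry_on_error

# Retries os.remove with exponential backoff for up to ~1 second,
# which papers over transient file locks on Windows.
retry_on_error(lambda: os.remove("tmp/flaky_file.py"), max_wait=1.0)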
code: (empty)
repo_name: algorandfoundation/puya
path: src/puyapy/_vendor/mypy/test/meta/__init__.py
language: Python
license: NOASSERTION
size: 0
import shlex
import subprocess
import sys
import textwrap
import uuid
from dataclasses import dataclass
from pathlib import Path
from typing import Iterable

from mypy.test.config import test_data_prefix


@dataclass
class PytestResult:
    input: str
    input_updated: str  # any updates made by --update-data
    stdout: str
    stderr: str


def dedent_docstring(s: str) -> str:
    return textwrap.dedent(s).lstrip()


def run_pytest_data_suite(
    data_suite: str,
    *,
    data_file_prefix: str = "check",
    pytest_node_prefix: str = "mypy/test/testcheck.py::TypeCheckSuite",
    extra_args: Iterable[str],
    max_attempts: int,
) -> PytestResult:
    """
    Runs a suite of data test cases through pytest until either the tests pass
    or a maximum number of attempts is reached (needed for incremental tests).

    :param data_suite: the actual "suite" i.e. the contents of a .test file
    """
    p_test_data = Path(test_data_prefix)
    p_root = p_test_data.parent.parent
    p = p_test_data / f"{data_file_prefix}-meta-{uuid.uuid4()}.test"
    assert not p.exists()
    data_suite = dedent_docstring(data_suite)
    try:
        p.write_text(data_suite)

        test_nodeid = f"{pytest_node_prefix}::{p.name}"
        extra_args = [sys.executable, "-m", "pytest", "-n", "0", "-s", *extra_args, test_nodeid]
        cmd = shlex.join(extra_args)
        for i in range(max_attempts - 1, -1, -1):
            print(f">> {cmd}")
            proc = subprocess.run(extra_args, capture_output=True, check=False, cwd=p_root)
            if proc.returncode == 0:
                break
            prefix = "NESTED PYTEST STDOUT"
            for line in proc.stdout.decode().splitlines():
                print(f"{prefix}: {line}")
                prefix = " " * len(prefix)
            prefix = "NESTED PYTEST STDERR"
            for line in proc.stderr.decode().splitlines():
                print(f"{prefix}: {line}")
                prefix = " " * len(prefix)
            print(f"Exit code {proc.returncode} ({i} attempts remaining)")

        return PytestResult(
            input=data_suite,
            input_updated=p.read_text(),
            stdout=proc.stdout.decode(),
            stderr=proc.stderr.decode(),
        )
    finally:
        p.unlink()
repo_name: algorandfoundation/puya
path: src/puyapy/_vendor/mypy/test/meta/_pytest.py
language: Python
license: NOASSERTION
size: 2,267
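run_pytest_data_suite only makes sense inside a mypy checkout, since it writes a temporary .test file under test-data/unit and launches a nested pytest subprocess. A hedged sketch of a call (the case body is illustrative; the meta-test suites below show the real usage):

from mypy.test.meta._pytest import run_pytest_data_suite

result = run_pytest_data_suite(
    """
    [case testExample]
    s: str = 42  # E: wrong error
    """,
    extra_args=["--update-data"],  # forwarded to the nested pytest run
    max_attempts=3,  # --update-data may need several passes for incremental cases
)
print(result.stdout)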
import io

from mypy.test.helpers import Suite, diff_ranges, render_diff_range


class DiffHelperSuite(Suite):
    def test_render_diff_range(self) -> None:
        expected = ["hello", "world"]
        actual = ["goodbye", "world"]

        expected_ranges, actual_ranges = diff_ranges(expected, actual)

        output = io.StringIO()
        render_diff_range(expected_ranges, expected, output=output)
        assert output.getvalue() == "  hello (diff)\n  world\n"
        output = io.StringIO()
        render_diff_range(actual_ranges, actual, output=output)
        assert output.getvalue() == "  goodbye (diff)\n  world\n"

        expected = ["a", "b", "c", "d", "e", "f", "g", "h", "circle", "i", "j"]
        actual = ["a", "b", "c", "d", "e", "f", "g", "h", "square", "i", "j"]

        expected_ranges, actual_ranges = diff_ranges(expected, actual)

        output = io.StringIO()
        render_diff_range(expected_ranges, expected, output=output, indent=0)
        assert output.getvalue() == "a\nb\nc\n...\nf\ng\nh\ncircle (diff)\ni\nj\n"
        output = io.StringIO()
        render_diff_range(actual_ranges, actual, output=output, indent=0)
        assert output.getvalue() == "a\nb\nc\n...\nf\ng\nh\nsquare (diff)\ni\nj\n"

    def test_diff_ranges(self) -> None:
        a = ["hello", "world"]
        b = ["hello", "world"]
        assert diff_ranges(a, b) == (
            [(0, 0), (0, 2), (2, 2), (2, 2)],
            [(0, 0), (0, 2), (2, 2), (2, 2)],
        )

        a = ["hello", "world"]
        b = ["goodbye", "world"]
        assert diff_ranges(a, b) == (
            [(0, 1), (1, 2), (2, 2), (2, 2)],
            [(0, 1), (1, 2), (2, 2), (2, 2)],
        )
repo_name: algorandfoundation/puya
path: src/puyapy/_vendor/mypy/test/meta/test_diff_helper.py
language: Python
license: NOASSERTION
size: 1,692
""" A "meta test" which tests the parsing of .test files. This is not meant to become exhaustive but to ensure we maintain a basic level of ergonomics for mypy contributors. """ from mypy.test.helpers import Suite from mypy.test.meta._pytest import PytestResult, run_pytest_data_suite def _run_pytest(data_suite: str) -> PytestResult: return run_pytest_data_suite(data_suite, extra_args=[], max_attempts=1) class ParseTestDataSuite(Suite): def test_parse_invalid_case(self) -> None: # Act result = _run_pytest( """ [case abc] s: str [case foo-XFAIL] s: str """ ) # Assert assert "Invalid testcase id 'foo-XFAIL'" in result.stdout def test_parse_invalid_section(self) -> None: # Act result = _run_pytest( """ [case abc] s: str [unknownsection] abc """ ) # Assert expected_lineno = result.input.splitlines().index("[unknownsection]") + 1 expected = ( f".test:{expected_lineno}: Invalid section header [unknownsection] in case 'abc'" ) assert expected in result.stdout def test_bad_ge_version_check(self) -> None: # Act actual = _run_pytest( """ [case abc] s: str [out version>=3.8] abc """ ) # Assert assert "version>=3.8 always true since minimum runtime version is (3, 8)" in actual.stdout def test_bad_eq_version_check(self) -> None: # Act actual = _run_pytest( """ [case abc] s: str [out version==3.7] abc """ ) # Assert assert "version==3.7 always false since minimum runtime version is (3, 8)" in actual.stdout
repo_name: algorandfoundation/puya
path: src/puyapy/_vendor/mypy/test/meta/test_parse_data.py
language: Python
license: NOASSERTION
size: 1,931
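The inline assertion syntax exercised above ('# E: message', optionally with a column as '# E:3: message') is expanded by expand_errors in data.py into expected-output lines. A quick sketch of what it produces:

from mypy.test.data import expand_errors

output: list[str] = []
expand_errors(["x = 1  # E:5: Bad thing", "y = 2  # N: A note"], output, "main")
print(output)
# Expected: ['main:1:5: error: Bad thing', 'main:2: note: A note']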
""" A "meta test" which tests the `--update-data` feature for updating .test files. Updating the expected output, especially when it's in the form of inline (comment) assertions, can be brittle, which is why we're "meta-testing" here. """ from mypy.test.helpers import Suite from mypy.test.meta._pytest import PytestResult, dedent_docstring, run_pytest_data_suite def _run_pytest_update_data(data_suite: str) -> PytestResult: """ Runs a suite of data test cases through 'pytest --update-data' until either tests pass or until a maximum number of attempts (needed for incremental tests). """ return run_pytest_data_suite(data_suite, extra_args=["--update-data"], max_attempts=3) class UpdateDataSuite(Suite): def test_update_data(self) -> None: # Note: We test multiple testcases rather than 'test case per test case' # so we could also exercise rewriting multiple testcases at once. result = _run_pytest_update_data( """ [case testCorrect] s: str = 42 # E: Incompatible types in assignment (expression has type "int", variable has type "str") [case testWrong] s: str = 42 # E: wrong error [case testXfail-xfail] s: str = 42 # E: wrong error [case testWrongMultiline] s: str = 42 # E: foo \ # N: bar [case testMissingMultiline] s: str = 42; i: int = 'foo' [case testExtraneous] s: str = 'foo' # E: wrong error [case testExtraneousMultiline] s: str = 'foo' # E: foo \ # E: bar [case testExtraneousMultilineNonError] s: str = 'foo' # W: foo \ # N: bar [case testOutCorrect] s: str = 42 [out] main:1: error: Incompatible types in assignment (expression has type "int", variable has type "str") [case testOutWrong] s: str = 42 [out] main:1: error: foobar [case testOutWrongIncremental] s: str = 42 [out] main:1: error: foobar [out2] main:1: error: foobar [case testWrongMultipleFiles] import a, b s: str = 42 # E: foo [file a.py] s1: str = 42 # E: bar [file b.py] s2: str = 43 # E: baz [builtins fixtures/list.pyi] """ ) # Assert expected = dedent_docstring( """ [case testCorrect] s: str = 42 # E: Incompatible types in assignment (expression has type "int", variable has type "str") [case testWrong] s: str = 42 # E: Incompatible types in assignment (expression has type "int", variable has type "str") [case testXfail-xfail] s: str = 42 # E: wrong error [case testWrongMultiline] s: str = 42 # E: Incompatible types in assignment (expression has type "int", variable has type "str") [case testMissingMultiline] s: str = 42; i: int = 'foo' # E: Incompatible types in assignment (expression has type "int", variable has type "str") \\ # E: Incompatible types in assignment (expression has type "str", variable has type "int") [case testExtraneous] s: str = 'foo' [case testExtraneousMultiline] s: str = 'foo' [case testExtraneousMultilineNonError] s: str = 'foo' [case testOutCorrect] s: str = 42 [out] main:1: error: Incompatible types in assignment (expression has type "int", variable has type "str") [case testOutWrong] s: str = 42 [out] main:1: error: Incompatible types in assignment (expression has type "int", variable has type "str") [case testOutWrongIncremental] s: str = 42 [out] main:1: error: Incompatible types in assignment (expression has type "int", variable has type "str") [out2] main:1: error: Incompatible types in assignment (expression has type "int", variable has type "str") [case testWrongMultipleFiles] import a, b s: str = 42 # E: Incompatible types in assignment (expression has type "int", variable has type "str") [file a.py] s1: str = 42 # E: Incompatible types in assignment (expression has type "int", variable has type 
"str") [file b.py] s2: str = 43 # E: Incompatible types in assignment (expression has type "int", variable has type "str") [builtins fixtures/list.pyi] """ ) assert result.input_updated == expected
repo_name: algorandfoundation/puya
path: src/puyapy/_vendor/mypy/test/meta/test_update_data.py
language: Python
license: NOASSERTION
size: 4,814
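The multiline assertions above rely on trailing-backslash continuations, which data.py folds back into a single logical line via collapse_line_continuation. A small sketch:

from mypy.test.data import collapse_line_continuation

lines = ["s: str = 42  # E: foo \\", "    # N: bar"]
print(collapse_line_continuation(lines))
# Expected: ['s: str = 42  # E: foo # N: bar']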
from __future__ import annotations

import os
import shutil
import tempfile
import unittest

import pytest

from mypy.find_sources import InvalidSourceList, SourceFinder, create_source_list
from mypy.fscache import FileSystemCache
from mypy.modulefinder import BuildSource
from mypy.options import Options


class FakeFSCache(FileSystemCache):
    def __init__(self, files: set[str]) -> None:
        self.files = {os.path.abspath(f) for f in files}

    def isfile(self, file: str) -> bool:
        return file in self.files

    def isdir(self, dir: str) -> bool:
        if not dir.endswith(os.sep):
            dir += os.sep
        return any(f.startswith(dir) for f in self.files)

    def listdir(self, dir: str) -> list[str]:
        if not dir.endswith(os.sep):
            dir += os.sep
        return list({f[len(dir) :].split(os.sep)[0] for f in self.files if f.startswith(dir)})

    def init_under_package_root(self, file: str) -> bool:
        return False


def normalise_path(path: str) -> str:
    path = os.path.splitdrive(path)[1]
    path = path.replace(os.sep, "/")
    return path


def normalise_build_source_list(sources: list[BuildSource]) -> list[tuple[str, str | None]]:
    return sorted(
        (s.module, (normalise_path(s.base_dir) if s.base_dir is not None else None))
        for s in sources
    )


def crawl(finder: SourceFinder, f: str) -> tuple[str, str]:
    module, base_dir = finder.crawl_up(f)
    return module, normalise_path(base_dir)


def find_sources_in_dir(finder: SourceFinder, f: str) -> list[tuple[str, str | None]]:
    return normalise_build_source_list(finder.find_sources_in_dir(os.path.abspath(f)))


def find_sources(
    paths: list[str], options: Options, fscache: FileSystemCache
) -> list[tuple[str, str | None]]:
    paths = [os.path.abspath(p) for p in paths]
    return normalise_build_source_list(create_source_list(paths, options, fscache))


class SourceFinderSuite(unittest.TestCase):
    def setUp(self) -> None:
        self.tempdir = tempfile.mkdtemp()
        self.oldcwd = os.getcwd()
        os.chdir(self.tempdir)

    def tearDown(self) -> None:
        os.chdir(self.oldcwd)
        shutil.rmtree(self.tempdir)

    def test_crawl_no_namespace(self) -> None:
        options = Options()
        options.namespace_packages = False

        finder = SourceFinder(FakeFSCache({"/setup.py"}), options)
        assert crawl(finder, "/setup.py") == ("setup", "/")

        finder = SourceFinder(FakeFSCache({"/a/setup.py"}), options)
        assert crawl(finder, "/a/setup.py") == ("setup", "/a")

        finder = SourceFinder(FakeFSCache({"/a/b/setup.py"}), options)
        assert crawl(finder, "/a/b/setup.py") == ("setup", "/a/b")

        finder = SourceFinder(FakeFSCache({"/a/setup.py", "/a/__init__.py"}), options)
        assert crawl(finder, "/a/setup.py") == ("a.setup", "/")

        finder = SourceFinder(FakeFSCache({"/a/invalid-name/setup.py", "/a/__init__.py"}), options)
        assert crawl(finder, "/a/invalid-name/setup.py") == ("setup", "/a/invalid-name")

        finder = SourceFinder(FakeFSCache({"/a/b/setup.py", "/a/__init__.py"}), options)
        assert crawl(finder, "/a/b/setup.py") == ("setup", "/a/b")

        finder = SourceFinder(
            FakeFSCache({"/a/b/c/setup.py", "/a/__init__.py", "/a/b/c/__init__.py"}), options
        )
        assert crawl(finder, "/a/b/c/setup.py") == ("c.setup", "/a/b")

    def test_crawl_namespace(self) -> None:
        options = Options()
        options.namespace_packages = True

        finder = SourceFinder(FakeFSCache({"/setup.py"}), options)
        assert crawl(finder, "/setup.py") == ("setup", "/")

        finder = SourceFinder(FakeFSCache({"/a/setup.py"}), options)
        assert crawl(finder, "/a/setup.py") == ("setup", "/a")

        finder = SourceFinder(FakeFSCache({"/a/b/setup.py"}), options)
        assert crawl(finder, "/a/b/setup.py") == ("setup", "/a/b")

        finder = SourceFinder(FakeFSCache({"/a/setup.py", "/a/__init__.py"}), options)
        assert crawl(finder, "/a/setup.py") == ("a.setup", "/")

        finder = SourceFinder(FakeFSCache({"/a/invalid-name/setup.py", "/a/__init__.py"}), options)
        assert crawl(finder, "/a/invalid-name/setup.py") == ("setup", "/a/invalid-name")

        finder = SourceFinder(FakeFSCache({"/a/b/setup.py", "/a/__init__.py"}), options)
        assert crawl(finder, "/a/b/setup.py") == ("a.b.setup", "/")

        finder = SourceFinder(
            FakeFSCache({"/a/b/c/setup.py", "/a/__init__.py", "/a/b/c/__init__.py"}), options
        )
        assert crawl(finder, "/a/b/c/setup.py") == ("a.b.c.setup", "/")

    def test_crawl_namespace_explicit_base(self) -> None:
        options = Options()
        options.namespace_packages = True
        options.explicit_package_bases = True

        finder = SourceFinder(FakeFSCache({"/setup.py"}), options)
        assert crawl(finder, "/setup.py") == ("setup", "/")

        finder = SourceFinder(FakeFSCache({"/a/setup.py"}), options)
        assert crawl(finder, "/a/setup.py") == ("setup", "/a")

        finder = SourceFinder(FakeFSCache({"/a/b/setup.py"}), options)
        assert crawl(finder, "/a/b/setup.py") == ("setup", "/a/b")

        finder = SourceFinder(FakeFSCache({"/a/setup.py", "/a/__init__.py"}), options)
        assert crawl(finder, "/a/setup.py") == ("a.setup", "/")

        finder = SourceFinder(FakeFSCache({"/a/invalid-name/setup.py", "/a/__init__.py"}), options)
        assert crawl(finder, "/a/invalid-name/setup.py") == ("setup", "/a/invalid-name")

        finder = SourceFinder(FakeFSCache({"/a/b/setup.py", "/a/__init__.py"}), options)
        assert crawl(finder, "/a/b/setup.py") == ("a.b.setup", "/")

        finder = SourceFinder(
            FakeFSCache({"/a/b/c/setup.py", "/a/__init__.py", "/a/b/c/__init__.py"}), options
        )
        assert crawl(finder, "/a/b/c/setup.py") == ("a.b.c.setup", "/")

        # set mypy path, so we actually have some explicit base dirs
        options.mypy_path = ["/a/b"]

        finder = SourceFinder(FakeFSCache({"/a/b/c/setup.py"}), options)
        assert crawl(finder, "/a/b/c/setup.py") == ("c.setup", "/a/b")

        finder = SourceFinder(
            FakeFSCache({"/a/b/c/setup.py", "/a/__init__.py", "/a/b/c/__init__.py"}), options
        )
        assert crawl(finder, "/a/b/c/setup.py") == ("c.setup", "/a/b")

        options.mypy_path = ["/a/b", "/a/b/c"]
        finder = SourceFinder(FakeFSCache({"/a/b/c/setup.py"}), options)
        assert crawl(finder, "/a/b/c/setup.py") == ("setup", "/a/b/c")

    def test_crawl_namespace_multi_dir(self) -> None:
        options = Options()
        options.namespace_packages = True
        options.explicit_package_bases = True
        options.mypy_path = ["/a", "/b"]

        finder = SourceFinder(FakeFSCache({"/a/pkg/a.py", "/b/pkg/b.py"}), options)
        assert crawl(finder, "/a/pkg/a.py") == ("pkg.a", "/a")
        assert crawl(finder, "/b/pkg/b.py") == ("pkg.b", "/b")

    def test_find_sources_in_dir_no_namespace(self) -> None:
        options = Options()
        options.namespace_packages = False

        files = {
            "/pkg/a1/b/c/d/e.py",
            "/pkg/a1/b/f.py",
            "/pkg/a2/__init__.py",
            "/pkg/a2/b/c/d/e.py",
            "/pkg/a2/b/f.py",
        }
        finder = SourceFinder(FakeFSCache(files), options)
        assert find_sources_in_dir(finder, "/") == [
            ("a2", "/pkg"),
            ("e", "/pkg/a1/b/c/d"),
            ("e", "/pkg/a2/b/c/d"),
            ("f", "/pkg/a1/b"),
            ("f", "/pkg/a2/b"),
        ]

    def test_find_sources_in_dir_namespace(self) -> None:
        options = Options()
        options.namespace_packages = True

        files = {
            "/pkg/a1/b/c/d/e.py",
            "/pkg/a1/b/f.py",
            "/pkg/a2/__init__.py",
            "/pkg/a2/b/c/d/e.py",
            "/pkg/a2/b/f.py",
        }
        finder = SourceFinder(FakeFSCache(files), options)
        assert find_sources_in_dir(finder, "/") == [
            ("a2", "/pkg"),
            ("a2.b.c.d.e", "/pkg"),
            ("a2.b.f", "/pkg"),
            ("e", "/pkg/a1/b/c/d"),
            ("f", "/pkg/a1/b"),
        ]

    def test_find_sources_in_dir_namespace_explicit_base(self) -> None:
        options = Options()
        options.namespace_packages = True
        options.explicit_package_bases = True
        options.mypy_path = ["/"]

        files = {
            "/pkg/a1/b/c/d/e.py",
            "/pkg/a1/b/f.py",
            "/pkg/a2/__init__.py",
            "/pkg/a2/b/c/d/e.py",
            "/pkg/a2/b/f.py",
        }
        finder = SourceFinder(FakeFSCache(files), options)
        assert find_sources_in_dir(finder, "/") == [
            ("pkg.a1.b.c.d.e", "/"),
            ("pkg.a1.b.f", "/"),
            ("pkg.a2", "/"),
            ("pkg.a2.b.c.d.e", "/"),
            ("pkg.a2.b.f", "/"),
        ]

        options.mypy_path = ["/pkg"]
        finder = SourceFinder(FakeFSCache(files), options)
        assert find_sources_in_dir(finder, "/") == [
            ("a1.b.c.d.e", "/pkg"),
            ("a1.b.f", "/pkg"),
            ("a2", "/pkg"),
            ("a2.b.c.d.e", "/pkg"),
            ("a2.b.f", "/pkg"),
        ]

    def test_find_sources_in_dir_namespace_multi_dir(self) -> None:
        options = Options()
        options.namespace_packages = True
        options.explicit_package_bases = True
        options.mypy_path = ["/a", "/b"]

        finder = SourceFinder(FakeFSCache({"/a/pkg/a.py", "/b/pkg/b.py"}), options)
        assert find_sources_in_dir(finder, "/") == [("pkg.a", "/a"), ("pkg.b", "/b")]

    def test_find_sources_exclude(self) -> None:
        options = Options()
        options.namespace_packages = True

        # default
        for excluded_dir in ["site-packages", ".whatever", "node_modules", ".x/.z"]:
            fscache = FakeFSCache({"/dir/a.py", f"/dir/venv/{excluded_dir}/b.py"})
            assert find_sources(["/"], options, fscache) == [("a", "/dir")]
            with pytest.raises(InvalidSourceList):
                find_sources(["/dir/venv/"], options, fscache)
            assert find_sources([f"/dir/venv/{excluded_dir}"], options, fscache) == [
                ("b", f"/dir/venv/{excluded_dir}")
            ]
            assert find_sources([f"/dir/venv/{excluded_dir}/b.py"], options, fscache) == [
                ("b", f"/dir/venv/{excluded_dir}")
            ]

        files = {
            "/pkg/a1/b/c/d/e.py",
            "/pkg/a1/b/f.py",
            "/pkg/a2/__init__.py",
            "/pkg/a2/b/c/d/e.py",
            "/pkg/a2/b/f.py",
        }

        # file name
        options.exclude = [r"/f\.py$"]
        fscache = FakeFSCache(files)
        assert find_sources(["/"], options, fscache) == [
            ("a2", "/pkg"),
            ("a2.b.c.d.e", "/pkg"),
            ("e", "/pkg/a1/b/c/d"),
        ]
        assert find_sources(["/pkg/a1/b/f.py"], options, fscache) == [("f", "/pkg/a1/b")]
        assert find_sources(["/pkg/a2/b/f.py"], options, fscache) == [("a2.b.f", "/pkg")]

        # directory name
        options.exclude = ["/a1/"]
        fscache = FakeFSCache(files)
        assert find_sources(["/"], options, fscache) == [
            ("a2", "/pkg"),
            ("a2.b.c.d.e", "/pkg"),
            ("a2.b.f", "/pkg"),
        ]
        with pytest.raises(InvalidSourceList):
            find_sources(["/pkg/a1"], options, fscache)
        with pytest.raises(InvalidSourceList):
            find_sources(["/pkg/a1/"], options, fscache)
        with pytest.raises(InvalidSourceList):
            find_sources(["/pkg/a1/b"], options, fscache)

        options.exclude = ["/a1/$"]
        assert find_sources(["/pkg/a1"], options, fscache) == [
            ("e", "/pkg/a1/b/c/d"),
            ("f", "/pkg/a1/b"),
        ]

        # paths
        options.exclude = ["/pkg/a1/"]
        fscache = FakeFSCache(files)
        assert find_sources(["/"], options, fscache) == [
            ("a2", "/pkg"),
            ("a2.b.c.d.e", "/pkg"),
            ("a2.b.f", "/pkg"),
        ]
        with pytest.raises(InvalidSourceList):
            find_sources(["/pkg/a1"], options, fscache)

        # OR two patterns together
        for orred in [["/(a1|a3)/"], ["a1", "a3"], ["a3", "a1"]]:
            options.exclude = orred
            fscache = FakeFSCache(files)
            assert find_sources(["/"], options, fscache) == [
                ("a2", "/pkg"),
                ("a2.b.c.d.e", "/pkg"),
                ("a2.b.f", "/pkg"),
            ]

        options.exclude = ["b/c/"]
        fscache = FakeFSCache(files)
        assert find_sources(["/"], options, fscache) == [
            ("a2", "/pkg"),
            ("a2.b.f", "/pkg"),
            ("f", "/pkg/a1/b"),
        ]

        # nothing should be ignored as a result of this
        big_exclude1 = [
            "/pkg/a/",
            "/2",
            "/1",
            "/pk/",
            "/kg",
            "/g.py",
            "/bc",
            "/xxx/pkg/a2/b/f.py",
            "xxx/pkg/a2/b/f.py",
        ]
        big_exclude2 = ["|".join(big_exclude1)]
        for big_exclude in [big_exclude1, big_exclude2]:
            options.exclude = big_exclude
            fscache = FakeFSCache(files)
            assert len(find_sources(["/"], options, fscache)) == len(files)

        files = {
            "pkg/a1/b/c/d/e.py",
            "pkg/a1/b/f.py",
            "pkg/a2/__init__.py",
            "pkg/a2/b/c/d/e.py",
            "pkg/a2/b/f.py",
        }
        fscache = FakeFSCache(files)
        assert len(find_sources(["."], options, fscache)) == len(files)
algorandfoundation/puya
src/puyapy/_vendor/mypy/test/test_find_sources.py
Python
NOASSERTION
13,684
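# A minimal sketch of the convention the suite above exercises, reusing its
# local helpers (FakeFSCache and crawl, defined earlier in that file, are
# assumed): with namespace_packages disabled, a file only gains dotted
# package components for ancestor directories containing an __init__.py.
from mypy.find_sources import SourceFinder
from mypy.options import Options

options = Options()
options.namespace_packages = False
finder = SourceFinder(
    FakeFSCache({"/a/b/setup.py", "/a/__init__.py", "/a/b/__init__.py"}), options
)
# Both /a and /a/b are packages, so the module is a.b.setup rooted at /.
assert crawl(finder, "/a/b/setup.py") == ("a.b.setup", "/")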
"""Test exporting line-level reference information (undocumented feature)""" from __future__ import annotations import json import os import sys from mypy import build from mypy.modulefinder import BuildSource from mypy.options import Options from mypy.test.config import test_temp_dir from mypy.test.data import DataDrivenTestCase, DataSuite from mypy.test.helpers import assert_string_arrays_equal class RefInfoSuite(DataSuite): required_out_section = True files = ["ref-info.test"] def run_case(self, testcase: DataDrivenTestCase) -> None: options = Options() options.use_builtins_fixtures = True options.show_traceback = True options.export_ref_info = True # This is the flag we are testing src = "\n".join(testcase.input) result = build.build( sources=[BuildSource("main", None, src)], options=options, alt_lib_path=test_temp_dir ) assert not result.errors major, minor = sys.version_info[:2] ref_path = os.path.join(options.cache_dir, f"{major}.{minor}", "__main__.refs.json") with open(ref_path) as refs_file: data = json.load(refs_file) a = [] for item in data: a.append(f"{item['line']}:{item['column']}:{item['target']}") assert_string_arrays_equal( testcase.output, a, f"Invalid output ({testcase.file}, line {testcase.line})" )
algorandfoundation/puya
src/puyapy/_vendor/mypy/test/test_ref_info.py
Python
NOASSERTION
1,432
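# The shape the test above assumes for __main__.refs.json: a JSON array of
# objects with "line", "column" and "target" keys. The record below is
# hypothetical, rendered the same way run_case renders it.
data = [{"line": 2, "column": 4, "target": "builtins.int"}]
rendered = [f"{item['line']}:{item['column']}:{item['target']}" for item in data]
assert rendered == ["2:4:builtins.int"]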
from __future__ import annotations

import sys
from io import StringIO

import mypy.api
from mypy.test.helpers import Suite


class APISuite(Suite):
    def setUp(self) -> None:
        self.sys_stdout = sys.stdout
        self.sys_stderr = sys.stderr
        sys.stdout = self.stdout = StringIO()
        sys.stderr = self.stderr = StringIO()

    def tearDown(self) -> None:
        sys.stdout = self.sys_stdout
        sys.stderr = self.sys_stderr
        assert self.stdout.getvalue() == ""
        assert self.stderr.getvalue() == ""

    def test_capture_bad_opt(self) -> None:
        """stderr should be captured when a bad option is passed."""
        _, stderr, _ = mypy.api.run(["--some-bad-option"])
        assert isinstance(stderr, str)
        assert stderr != ""

    def test_capture_empty(self) -> None:
        """stderr should be captured when no arguments are passed."""
        _, stderr, _ = mypy.api.run([])
        assert isinstance(stderr, str)
        assert stderr != ""

    def test_capture_help(self) -> None:
        """stdout should be captured when --help is passed."""
        stdout, _, _ = mypy.api.run(["--help"])
        assert isinstance(stdout, str)
        assert stdout != ""

    def test_capture_version(self) -> None:
        """stdout should be captured when --version is passed."""
        stdout, _, _ = mypy.api.run(["--version"])
        assert isinstance(stdout, str)
        assert stdout != ""
algorandfoundation/puya
src/puyapy/_vendor/mypy/test/testapi.py
Python
NOASSERTION
1,447
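# Direct use of the API the suite above exercises: mypy.api.run returns a
# (stdout, stderr, exit_status) triple, and bad options are reported on the
# returned stderr rather than raised.
import mypy.api

stdout, stderr, exit_status = mypy.api.run(["--version"])
# --version is expected to print the version to stdout and exit cleanly.
assert exit_status == 0 and stdout != ""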
"""Ensure the argparse parser and Options class are in sync. In particular, verify that the argparse defaults are the same as the Options defaults, and that argparse doesn't assign any new members to the Options object it creates. """ from __future__ import annotations import argparse import sys from mypy.main import infer_python_executable, process_options from mypy.options import Options from mypy.test.helpers import Suite, assert_equal class ArgSuite(Suite): def test_coherence(self) -> None: options = Options() _, parsed_options = process_options([], require_targets=False) # FIX: test this too. Requires changing working dir to avoid finding 'setup.cfg' options.config_file = parsed_options.config_file assert_equal(options.snapshot(), parsed_options.snapshot()) def test_executable_inference(self) -> None: """Test the --python-executable flag with --python-version""" sys_ver_str = "{ver.major}.{ver.minor}".format(ver=sys.version_info) base = ["file.py"] # dummy file # test inference given one (infer the other) matching_version = base + [f"--python-version={sys_ver_str}"] _, options = process_options(matching_version) assert options.python_version == sys.version_info[:2] assert options.python_executable == sys.executable matching_version = base + [f"--python-executable={sys.executable}"] _, options = process_options(matching_version) assert options.python_version == sys.version_info[:2] assert options.python_executable == sys.executable # test inference given both matching_version = base + [ f"--python-version={sys_ver_str}", f"--python-executable={sys.executable}", ] _, options = process_options(matching_version) assert options.python_version == sys.version_info[:2] assert options.python_executable == sys.executable # test that --no-site-packages will disable executable inference matching_version = base + [f"--python-version={sys_ver_str}", "--no-site-packages"] _, options = process_options(matching_version) assert options.python_version == sys.version_info[:2] assert options.python_executable is None # Test setting python_version/executable from config file special_opts = argparse.Namespace() special_opts.python_executable = None special_opts.python_version = None special_opts.no_executable = None # first test inferring executable from version options = Options() options.python_executable = None options.python_version = sys.version_info[:2] infer_python_executable(options, special_opts) assert options.python_version == sys.version_info[:2] assert options.python_executable == sys.executable # then test inferring version from executable options = Options() options.python_executable = sys.executable infer_python_executable(options, special_opts) assert options.python_version == sys.version_info[:2] assert options.python_executable == sys.executable
algorandfoundation/puya
src/puyapy/_vendor/mypy/test/testargs.py
Python
NOASSERTION
3,213
"""Type checker test cases""" from __future__ import annotations import os import re import sys from mypy import build from mypy.build import Graph from mypy.errors import CompileError from mypy.modulefinder import BuildSource, FindModuleCache, SearchPaths from mypy.test.config import test_data_prefix, test_temp_dir from mypy.test.data import DataDrivenTestCase, DataSuite, FileOperation, module_from_path from mypy.test.helpers import ( assert_module_equivalence, assert_string_arrays_equal, assert_target_equivalence, check_test_output_files, find_test_files, normalize_error_messages, parse_options, perform_file_operations, ) from mypy.test.update_data import update_testcase_output try: import lxml # type: ignore[import-untyped] except ImportError: lxml = None import pytest # List of files that contain test case descriptions. # Includes all check-* files with the .test extension in the test-data/unit directory typecheck_files = find_test_files(pattern="check-*.test") # Tests that use Python version specific features: if sys.version_info < (3, 9): typecheck_files.remove("check-python39.test") if sys.version_info < (3, 10): typecheck_files.remove("check-python310.test") if sys.version_info < (3, 11): typecheck_files.remove("check-python311.test") if sys.version_info < (3, 12): typecheck_files.remove("check-python312.test") if sys.version_info < (3, 13): typecheck_files.remove("check-python313.test") # Special tests for platforms with case-insensitive filesystems. if sys.platform not in ("darwin", "win32"): typecheck_files.remove("check-modules-case.test") class TypeCheckSuite(DataSuite): files = typecheck_files def run_case(self, testcase: DataDrivenTestCase) -> None: if lxml is None and os.path.basename(testcase.file) == "check-reports.test": pytest.skip("Cannot import lxml. Is it installed?") incremental = ( "incremental" in testcase.name.lower() or "incremental" in testcase.file or "serialize" in testcase.file ) if incremental: # Incremental tests are run once with a cold cache, once with a warm cache. # Expect success on first run, errors from testcase.output (if any) on second run. num_steps = max([2] + list(testcase.output2.keys())) # Check that there are no file changes beyond the last run (they would be ignored). for dn, dirs, files in os.walk(os.curdir): for file in files: m = re.search(r"\.([2-9])$", file) if m and int(m.group(1)) > num_steps: raise ValueError( "Output file {} exists though test case only has {} runs".format( file, num_steps ) ) steps = testcase.find_steps() for step in range(1, num_steps + 1): idx = step - 2 ops = steps[idx] if idx < len(steps) and idx >= 0 else [] self.run_case_once(testcase, ops, step) else: self.run_case_once(testcase) def _sort_output_if_needed(self, testcase: DataDrivenTestCase, a: list[str]) -> None: idx = testcase.output_inline_start if not testcase.files or idx == len(testcase.output): return def _filename(_msg: str) -> str: return _msg.partition(":")[0] file_weights = {file: idx for idx, file in enumerate(_filename(msg) for msg in a)} testcase.output[idx:] = sorted( testcase.output[idx:], key=lambda msg: file_weights.get(_filename(msg), -1) ) def run_case_once( self, testcase: DataDrivenTestCase, operations: list[FileOperation] | None = None, incremental_step: int = 0, ) -> None: if operations is None: operations = [] original_program_text = "\n".join(testcase.input) module_data = self.parse_module(original_program_text, incremental_step) # Unload already loaded plugins, they may be updated. 
for file, _ in testcase.files: module = module_from_path(file) if module.endswith("_plugin") and module in sys.modules: del sys.modules[module] if incremental_step == 0 or incremental_step == 1: # In run 1, copy program text to program file. for module_name, program_path, program_text in module_data: if module_name == "__main__": with open(program_path, "w", encoding="utf8") as f: f.write(program_text) break elif incremental_step > 1: # In runs 2+, copy *.[num] files to * files. perform_file_operations(operations) # Parse options after moving files (in case mypy.ini is being moved). options = parse_options(original_program_text, testcase, incremental_step) options.use_builtins_fixtures = True options.show_traceback = True # Enable some options automatically based on test file name. if "columns" in testcase.file: options.show_column_numbers = True if "errorcodes" in testcase.file: options.hide_error_codes = False if "abstract" not in testcase.file: options.allow_empty_bodies = not testcase.name.endswith("_no_empty") if "lowercase" not in testcase.file: options.force_uppercase_builtins = True if "union-error" not in testcase.file: options.force_union_syntax = True if incremental_step and options.incremental: # Don't overwrite # flags: --no-incremental in incremental test cases options.incremental = True else: options.incremental = False # Don't waste time writing cache unless we are specifically looking for it if not testcase.writescache: options.cache_dir = os.devnull sources = [] for module_name, program_path, program_text in module_data: # Always set to none so we're forced to reread the module in incremental mode sources.append( BuildSource(program_path, module_name, None if incremental_step else program_text) ) plugin_dir = os.path.join(test_data_prefix, "plugins") sys.path.insert(0, plugin_dir) res = None try: res = build.build(sources=sources, options=options, alt_lib_path=test_temp_dir) a = res.errors except CompileError as e: a = e.messages finally: assert sys.path[0] == plugin_dir del sys.path[0] if testcase.normalize_output: a = normalize_error_messages(a) # Make sure error messages match if incremental_step < 2: if incremental_step == 1: msg = "Unexpected type checker output in incremental, run 1 ({}, line {})" else: assert incremental_step == 0 msg = "Unexpected type checker output ({}, line {})" self._sort_output_if_needed(testcase, a) output = testcase.output else: msg = ( f"Unexpected type checker output in incremental, run {incremental_step}" + " ({}, line {})" ) output = testcase.output2.get(incremental_step, []) if output != a and testcase.config.getoption("--update-data", False): update_testcase_output(testcase, a, incremental_step=incremental_step) assert_string_arrays_equal(output, a, msg.format(testcase.file, testcase.line)) if res: if options.cache_dir != os.devnull: self.verify_cache(module_data, res.errors, res.manager, res.graph) name = "targets" if incremental_step: name += str(incremental_step + 1) expected = testcase.expected_fine_grained_targets.get(incremental_step + 1) actual = [ target for module, target in res.manager.processed_targets if module in testcase.test_modules ] if expected is not None: assert_target_equivalence(name, expected, actual) if incremental_step > 1: suffix = "" if incremental_step == 2 else str(incremental_step - 1) expected_rechecked = testcase.expected_rechecked_modules.get(incremental_step - 1) if expected_rechecked is not None: assert_module_equivalence( "rechecked" + suffix, expected_rechecked, res.manager.rechecked_modules ) 
expected_stale = testcase.expected_stale_modules.get(incremental_step - 1) if expected_stale is not None: assert_module_equivalence( "stale" + suffix, expected_stale, res.manager.stale_modules ) if testcase.output_files: check_test_output_files(testcase, incremental_step, strip_prefix="tmp/") def verify_cache( self, module_data: list[tuple[str, str, str]], a: list[str], manager: build.BuildManager, graph: Graph, ) -> None: # There should be valid cache metadata for each module except # for those that had an error in themselves or one of their # dependencies. error_paths = self.find_error_message_paths(a) busted_paths = {m.path for id, m in manager.modules.items() if graph[id].transitive_error} modules = self.find_module_files(manager) modules.update({module_name: path for module_name, path, text in module_data}) missing_paths = self.find_missing_cache_files(modules, manager) # We would like to assert error_paths.issubset(busted_paths) # but this runs into trouble because while some 'notes' are # really errors that cause an error to be marked, many are # just notes attached to other errors. assert error_paths or not busted_paths, "Some modules reported error despite no errors" if not missing_paths == busted_paths: raise AssertionError(f"cache data discrepancy {missing_paths} != {busted_paths}") assert os.path.isfile(os.path.join(manager.options.cache_dir, ".gitignore")) cachedir_tag = os.path.join(manager.options.cache_dir, "CACHEDIR.TAG") assert os.path.isfile(cachedir_tag) with open(cachedir_tag) as f: assert f.read().startswith("Signature: 8a477f597d28d172789f06886806bc55") def find_error_message_paths(self, a: list[str]) -> set[str]: hits = set() for line in a: m = re.match(r"([^\s:]+):(\d+:)?(\d+:)? (error|warning|note):", line) if m: p = m.group(1) hits.add(p) return hits def find_module_files(self, manager: build.BuildManager) -> dict[str, str]: return {id: module.path for id, module in manager.modules.items()} def find_missing_cache_files( self, modules: dict[str, str], manager: build.BuildManager ) -> set[str]: ignore_errors = True missing = {} for id, path in modules.items(): meta = build.find_cache_meta(id, path, manager) if not build.validate_meta(meta, id, path, ignore_errors, manager): missing[id] = path return set(missing.values()) def parse_module( self, program_text: str, incremental_step: int = 0 ) -> list[tuple[str, str, str]]: """Return the module and program names for a test case. Normally, the unit tests will parse the default ('__main__') module and follow all the imports listed there. You can override this behavior and instruct the tests to check multiple modules by using a comment like this in the test case input: # cmd: mypy -m foo.bar foo.baz You can also use `# cmdN:` to have a different cmd for incremental step N (2, 3, ...). Return a list of tuples (module name, file name, program text). """ m = re.search("# cmd: mypy -m ([a-zA-Z0-9_. ]+)$", program_text, flags=re.MULTILINE) if incremental_step > 1: alt_regex = f"# cmd{incremental_step}: mypy -m ([a-zA-Z0-9_. ]+)$" alt_m = re.search(alt_regex, program_text, flags=re.MULTILINE) if alt_m is not None: # Optionally return a different command if in a later step # of incremental mode, otherwise default to reusing the # original cmd. m = alt_m if m: # The test case wants to use a non-default main # module. Look up the module and give it as the thing to # analyze. 
module_names = m.group(1) out = [] search_paths = SearchPaths((test_temp_dir,), (), (), ()) cache = FindModuleCache(search_paths, fscache=None, options=None) for module_name in module_names.split(" "): path = cache.find_module(module_name) assert isinstance(path, str), f"Can't find ad hoc case file: {module_name}" with open(path, encoding="utf8") as f: program_text = f.read() out.append((module_name, path, program_text)) return out else: return [("__main__", "main", program_text)]
algorandfoundation/puya
src/puyapy/_vendor/mypy/test/testcheck.py
Python
NOASSERTION
13,744
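# Sketch of the incremental-file convention enforced in run_case above: a
# file named "mod.py.3" (the name is hypothetical) is the version of mod.py
# introduced at run 3, so a case with only two runs must not ship one.
import re

m = re.search(r"\.([2-9])$", "mod.py.3")
assert m is not None and int(m.group(1)) == 3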
"""Test cases for the command line. To begin we test that "mypy <directory>[/]" always recurses down the whole tree. """ from __future__ import annotations import os import re import subprocess import sys from mypy.test.config import PREFIX, test_temp_dir from mypy.test.data import DataDrivenTestCase, DataSuite from mypy.test.helpers import ( assert_string_arrays_equal, check_test_output_files, normalize_error_messages, ) try: import lxml # type: ignore[import-untyped] except ImportError: lxml = None import pytest # Path to Python 3 interpreter python3_path = sys.executable # Files containing test case descriptions. cmdline_files = ["cmdline.test", "cmdline.pyproject.test", "reports.test", "envvars.test"] class PythonCmdlineSuite(DataSuite): files = cmdline_files native_sep = True def run_case(self, testcase: DataDrivenTestCase) -> None: if lxml is None and os.path.basename(testcase.file) == "reports.test": pytest.skip("Cannot import lxml. Is it installed?") for step in [1] + sorted(testcase.output2): test_python_cmdline(testcase, step) def test_python_cmdline(testcase: DataDrivenTestCase, step: int) -> None: assert testcase.old_cwd is not None, "test was not properly set up" # Write the program to a file. program = "_program.py" program_path = os.path.join(test_temp_dir, program) with open(program_path, "w", encoding="utf8") as file: for s in testcase.input: file.write(f"{s}\n") args = parse_args(testcase.input[0]) custom_cwd = parse_cwd(testcase.input[1]) if len(testcase.input) > 1 else None args.append("--show-traceback") if "--error-summary" not in args: args.append("--no-error-summary") if "--show-error-codes" not in args: args.append("--hide-error-codes") if "--disallow-empty-bodies" not in args: args.append("--allow-empty-bodies") if "--no-force-uppercase-builtins" not in args: args.append("--force-uppercase-builtins") if "--no-force-union-syntax" not in args: args.append("--force-union-syntax") # Type check the program. fixed = [python3_path, "-m", "mypy"] env = os.environ.copy() env.pop("COLUMNS", None) extra_path = os.path.join(os.path.abspath(test_temp_dir), "pypath") env["PYTHONPATH"] = PREFIX if os.path.isdir(extra_path): env["PYTHONPATH"] += os.pathsep + extra_path cwd = os.path.join(test_temp_dir, custom_cwd or "") args = [arg.replace("$CWD", os.path.abspath(cwd)) for arg in args] process = subprocess.Popen( fixed + args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd, env=env ) outb, errb = process.communicate() result = process.returncode # Split output into lines. out = [s.rstrip("\n\r") for s in str(outb, "utf8").splitlines()] err = [s.rstrip("\n\r") for s in str(errb, "utf8").splitlines()] if "PYCHARM_HOSTED" in os.environ: for pos, line in enumerate(err): if line.startswith("pydev debugger: "): # Delete the attaching debugger message itself, plus the extra newline added. del err[pos : pos + 2] break # Remove temp file. os.remove(program_path) # Compare actual output to expected. if testcase.output_files: # Ignore stdout, but we insist on empty stderr and zero status. 
if err or result: raise AssertionError( "Expected zero status and empty stderr%s, got %d and\n%s" % (" on step %d" % step if testcase.output2 else "", result, "\n".join(err + out)) ) check_test_output_files(testcase, step) else: if testcase.normalize_output: out = normalize_error_messages(err + out) obvious_result = 1 if out else 0 if obvious_result != result: out.append(f"== Return code: {result}") expected_out = testcase.output if step == 1 else testcase.output2[step] # Strip "tmp/" out of the test so that # E: works... expected_out = [s.replace("tmp" + os.sep, "") for s in expected_out] assert_string_arrays_equal( expected_out, out, "Invalid output ({}, line {}){}".format( testcase.file, testcase.line, " on step %d" % step if testcase.output2 else "" ), ) def parse_args(line: str) -> list[str]: """Parse the first line of the program for the command line. This should have the form # cmd: mypy <options> For example: # cmd: mypy pkg/ """ m = re.match("# cmd: mypy (.*)$", line) if not m: return [] # No args; mypy will spit out an error. return m.group(1).split() def parse_cwd(line: str) -> str | None: """Parse the second line of the program for the command line. This should have the form # cwd: <directory> For example: # cwd: main/subdir """ m = re.match("# cwd: (.*)$", line) return m.group(1) if m else None
algorandfoundation/puya
src/puyapy/_vendor/mypy/test/testcmdline.py
Python
NOASSERTION
5,082
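# The first two lines of a cmdline test case drive the subprocess, per
# parse_args and parse_cwd defined above (reused here, not imported).
assert parse_args("# cmd: mypy pkg/") == ["pkg/"]
assert parse_cwd("# cwd: main/subdir") == "main/subdir"
# A line without a cwd comment yields None, so the test runs from tmp/.
assert parse_cwd("print('no cwd comment')") is None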
from __future__ import annotations from mypy.constraints import SUBTYPE_OF, SUPERTYPE_OF, Constraint, infer_constraints from mypy.test.helpers import Suite from mypy.test.typefixture import TypeFixture from mypy.types import Instance, TupleType, UnpackType class ConstraintsSuite(Suite): def setUp(self) -> None: self.fx = TypeFixture() def test_no_type_variables(self) -> None: assert not infer_constraints(self.fx.o, self.fx.o, SUBTYPE_OF) def test_basic_type_variable(self) -> None: fx = self.fx for direction in [SUBTYPE_OF, SUPERTYPE_OF]: assert infer_constraints(fx.gt, fx.ga, direction) == [ Constraint(type_var=fx.t, op=direction, target=fx.a) ] def test_basic_type_var_tuple_subtype(self) -> None: fx = self.fx assert infer_constraints( Instance(fx.gvi, [UnpackType(fx.ts)]), Instance(fx.gvi, [fx.a, fx.b]), SUBTYPE_OF ) == [ Constraint(type_var=fx.ts, op=SUBTYPE_OF, target=TupleType([fx.a, fx.b], fx.std_tuple)) ] def test_basic_type_var_tuple(self) -> None: fx = self.fx assert set( infer_constraints( Instance(fx.gvi, [UnpackType(fx.ts)]), Instance(fx.gvi, [fx.a, fx.b]), SUPERTYPE_OF ) ) == { Constraint( type_var=fx.ts, op=SUPERTYPE_OF, target=TupleType([fx.a, fx.b], fx.std_tuple) ), Constraint( type_var=fx.ts, op=SUBTYPE_OF, target=TupleType([fx.a, fx.b], fx.std_tuple) ), } def test_type_var_tuple_with_prefix_and_suffix(self) -> None: fx = self.fx assert set( infer_constraints( Instance(fx.gv2i, [fx.t, UnpackType(fx.ts), fx.s]), Instance(fx.gv2i, [fx.a, fx.b, fx.c, fx.d]), SUPERTYPE_OF, ) ) == { Constraint(type_var=fx.t, op=SUPERTYPE_OF, target=fx.a), Constraint( type_var=fx.ts, op=SUPERTYPE_OF, target=TupleType([fx.b, fx.c], fx.std_tuple) ), Constraint( type_var=fx.ts, op=SUBTYPE_OF, target=TupleType([fx.b, fx.c], fx.std_tuple) ), Constraint(type_var=fx.s, op=SUPERTYPE_OF, target=fx.d), } def test_unpack_homogenous_tuple(self) -> None: fx = self.fx assert set( infer_constraints( Instance(fx.gvi, [UnpackType(Instance(fx.std_tuplei, [fx.t]))]), Instance(fx.gvi, [fx.a, fx.b]), SUPERTYPE_OF, ) ) == { Constraint(type_var=fx.t, op=SUPERTYPE_OF, target=fx.a), Constraint(type_var=fx.t, op=SUBTYPE_OF, target=fx.a), Constraint(type_var=fx.t, op=SUPERTYPE_OF, target=fx.b), Constraint(type_var=fx.t, op=SUBTYPE_OF, target=fx.b), } def test_unpack_homogenous_tuple_with_prefix_and_suffix(self) -> None: fx = self.fx assert set( infer_constraints( Instance(fx.gv2i, [fx.t, UnpackType(Instance(fx.std_tuplei, [fx.s])), fx.u]), Instance(fx.gv2i, [fx.a, fx.b, fx.c, fx.d]), SUPERTYPE_OF, ) ) == { Constraint(type_var=fx.t, op=SUPERTYPE_OF, target=fx.a), Constraint(type_var=fx.s, op=SUPERTYPE_OF, target=fx.b), Constraint(type_var=fx.s, op=SUBTYPE_OF, target=fx.b), Constraint(type_var=fx.s, op=SUPERTYPE_OF, target=fx.c), Constraint(type_var=fx.s, op=SUBTYPE_OF, target=fx.c), Constraint(type_var=fx.u, op=SUPERTYPE_OF, target=fx.d), } def test_unpack_with_prefix_and_suffix(self) -> None: fx = self.fx assert set( infer_constraints( Instance(fx.gv2i, [fx.u, fx.t, fx.s, fx.u]), Instance(fx.gv2i, [fx.a, fx.b, fx.c, fx.d]), SUPERTYPE_OF, ) ) == { Constraint(type_var=fx.u, op=SUPERTYPE_OF, target=fx.a), Constraint(type_var=fx.t, op=SUPERTYPE_OF, target=fx.b), Constraint(type_var=fx.t, op=SUBTYPE_OF, target=fx.b), Constraint(type_var=fx.s, op=SUPERTYPE_OF, target=fx.c), Constraint(type_var=fx.s, op=SUBTYPE_OF, target=fx.c), Constraint(type_var=fx.u, op=SUPERTYPE_OF, target=fx.d), } def test_unpack_tuple_length_non_match(self) -> None: fx = self.fx assert set( infer_constraints( Instance(fx.gv2i, [fx.u, fx.t, fx.s, fx.u]), 
Instance(fx.gv2i, [fx.a, fx.b, fx.d]), SUPERTYPE_OF, ) # We still get constraints on the prefix/suffix in this case. ) == { Constraint(type_var=fx.u, op=SUPERTYPE_OF, target=fx.a), Constraint(type_var=fx.u, op=SUPERTYPE_OF, target=fx.d), } def test_var_length_tuple_with_fixed_length_tuple(self) -> None: fx = self.fx assert not infer_constraints( TupleType([fx.t, fx.s], fallback=Instance(fx.std_tuplei, [fx.o])), Instance(fx.std_tuplei, [fx.a]), SUPERTYPE_OF, )
algorandfoundation/puya
src/puyapy/_vendor/mypy/test/testconstraints.py
Python
NOASSERTION
5,265
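# How to read a constraint, mirroring test_basic_type_variable above:
# inferring gt against ga in the SUPERTYPE_OF direction yields "T :> A",
# i.e. the type variable must be a supertype of the actual type.
from mypy.constraints import SUPERTYPE_OF, Constraint, infer_constraints
from mypy.test.typefixture import TypeFixture

fx = TypeFixture()
(c,) = infer_constraints(fx.gt, fx.ga, SUPERTYPE_OF)
assert c == Constraint(type_var=fx.t, op=SUPERTYPE_OF, target=fx.a)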
"""End-to-end test cases for the daemon (dmypy). These are special because they run multiple shell commands. This also includes some unit tests. """ from __future__ import annotations import os import subprocess import sys import tempfile import unittest from mypy.dmypy_server import filter_out_missing_top_level_packages from mypy.fscache import FileSystemCache from mypy.modulefinder import SearchPaths from mypy.test.config import PREFIX, test_temp_dir from mypy.test.data import DataDrivenTestCase, DataSuite from mypy.test.helpers import assert_string_arrays_equal, normalize_error_messages # Files containing test cases descriptions. daemon_files = ["daemon.test"] class DaemonSuite(DataSuite): files = daemon_files def run_case(self, testcase: DataDrivenTestCase) -> None: try: test_daemon(testcase) finally: # Kill the daemon if it's still running. run_cmd("dmypy kill") def test_daemon(testcase: DataDrivenTestCase) -> None: assert testcase.old_cwd is not None, "test was not properly set up" for i, step in enumerate(parse_script(testcase.input)): cmd = step[0] expected_lines = step[1:] assert cmd.startswith("$") cmd = cmd[1:].strip() cmd = cmd.replace("{python}", sys.executable) sts, output = run_cmd(cmd) output_lines = output.splitlines() output_lines = normalize_error_messages(output_lines) if sts: output_lines.append("== Return code: %d" % sts) assert_string_arrays_equal( expected_lines, output_lines, "Command %d (%s) did not give expected output" % (i + 1, cmd), ) def parse_script(input: list[str]) -> list[list[str]]: """Parse testcase.input into steps. Each command starts with a line starting with '$'. The first line (less '$') is sent to the shell. The remaining lines are expected output. """ steps = [] step: list[str] = [] for line in input: if line.startswith("$"): if step: assert step[0].startswith("$") steps.append(step) step = [] step.append(line) if step: steps.append(step) return steps def run_cmd(input: str) -> tuple[int, str]: if input[1:].startswith("mypy run --") and "--show-error-codes" not in input: input += " --hide-error-codes" if input.startswith("dmypy "): input = sys.executable + " -m mypy." 
+ input if input.startswith("mypy "): input = sys.executable + " -m" + input env = os.environ.copy() env["PYTHONPATH"] = PREFIX try: output = subprocess.check_output( input, shell=True, stderr=subprocess.STDOUT, text=True, cwd=test_temp_dir, env=env ) return 0, output except subprocess.CalledProcessError as err: return err.returncode, err.output class DaemonUtilitySuite(unittest.TestCase): """Unit tests for helpers""" def test_filter_out_missing_top_level_packages(self) -> None: with tempfile.TemporaryDirectory() as td: self.make_file(td, "base/a/") self.make_file(td, "base/b.py") self.make_file(td, "base/c.pyi") self.make_file(td, "base/missing.txt") self.make_file(td, "typeshed/d.pyi") self.make_file(td, "typeshed/@python2/e") # outdated self.make_file(td, "pkg1/f-stubs") self.make_file(td, "pkg2/g-python2-stubs") # outdated self.make_file(td, "mpath/sub/long_name/") def makepath(p: str) -> str: return os.path.join(td, p) search = SearchPaths( python_path=(makepath("base"),), mypy_path=(makepath("mpath/sub"),), package_path=(makepath("pkg1"), makepath("pkg2")), typeshed_path=(makepath("typeshed"),), ) fscache = FileSystemCache() res = filter_out_missing_top_level_packages( {"a", "b", "c", "d", "e", "f", "g", "long_name", "ff", "missing"}, search, fscache ) assert res == {"a", "b", "c", "d", "f", "long_name"} def make_file(self, base: str, path: str) -> None: fullpath = os.path.join(base, path) os.makedirs(os.path.dirname(fullpath), exist_ok=True) if not path.endswith("/"): with open(fullpath, "w") as f: f.write("# test file")
algorandfoundation/puya
src/puyapy/_vendor/mypy/test/testdaemon.py
Python
NOASSERTION
4,511
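# The script format parse_script (defined above) consumes: each '$' line
# starts a step, and everything until the next '$' line is that step's
# expected output. "Daemon started" is a hypothetical output line.
script = [
    "$ dmypy start",
    "Daemon started",
    "$ dmypy stop",
]
assert parse_script(script) == [["$ dmypy start", "Daemon started"], ["$ dmypy stop"]]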
"""Test cases for generating node-level dependencies (for fine-grained incremental checking)""" from __future__ import annotations import os import sys from collections import defaultdict import pytest from mypy import build from mypy.errors import CompileError from mypy.modulefinder import BuildSource from mypy.nodes import Expression, MypyFile from mypy.options import Options from mypy.server.deps import get_dependencies from mypy.test.config import test_temp_dir from mypy.test.data import DataDrivenTestCase, DataSuite from mypy.test.helpers import assert_string_arrays_equal, find_test_files, parse_options from mypy.types import Type from mypy.typestate import type_state # Only dependencies in these modules are dumped dumped_modules = ["__main__", "pkg", "pkg.mod"] class GetDependenciesSuite(DataSuite): files = find_test_files(pattern="deps*.test") def run_case(self, testcase: DataDrivenTestCase) -> None: src = "\n".join(testcase.input) dump_all = "# __dump_all__" in src options = parse_options(src, testcase, incremental_step=1) if options.python_version > sys.version_info: pytest.skip("Test case requires a newer Python version") options.use_builtins_fixtures = True options.show_traceback = True options.cache_dir = os.devnull options.export_types = True options.preserve_asts = True options.allow_empty_bodies = True messages, files, type_map = self.build(src, options) a = messages if files is None or type_map is None: if not a: a = ["Unknown compile error (likely syntax error in test case or fixture)"] else: deps: defaultdict[str, set[str]] = defaultdict(set) for module, file in files.items(): if (module in dumped_modules or dump_all) and (module in testcase.test_modules): new_deps = get_dependencies(file, type_map, options.python_version, options) for source in new_deps: deps[source].update(new_deps[source]) type_state.add_all_protocol_deps(deps) for source, targets in sorted(deps.items()): if source.startswith(("<enum", "<typing", "<mypy", "<_typeshed.")): # Remove noise. continue line = f"{source} -> {', '.join(sorted(targets))}" # Clean up output a bit line = line.replace("__main__", "m") a.append(line) assert_string_arrays_equal( testcase.output, a, f"Invalid output ({testcase.file}, line {testcase.line})" ) def build( self, source: str, options: Options ) -> tuple[list[str], dict[str, MypyFile] | None, dict[Expression, Type] | None]: try: result = build.build( sources=[BuildSource("main", None, source)], options=options, alt_lib_path=test_temp_dir, ) except CompileError as e: # TODO: Should perhaps not return None here. return e.messages, None, None return result.errors, result.files, result.types
algorandfoundation/puya
src/puyapy/_vendor/mypy/test/testdeps.py
Python
NOASSERTION
3,236
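# Shape of a dumped dependency line, as produced in run_case above: the
# trigger on the left, its sorted targets on the right, with "__main__"
# shortened to "m". The trigger and target names here are hypothetical.
source, targets = "<__main__.f>", {"__main__.g", "__main__.h"}
line = f"{source} -> {', '.join(sorted(targets))}".replace("__main__", "m")
assert line == "<m.f> -> m.g, m.h"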
"""Test cases for AST diff (used for fine-grained incremental checking)""" from __future__ import annotations import os import sys import pytest from mypy import build from mypy.errors import CompileError from mypy.modulefinder import BuildSource from mypy.nodes import MypyFile from mypy.options import Options from mypy.server.astdiff import compare_symbol_table_snapshots, snapshot_symbol_table from mypy.test.config import test_temp_dir from mypy.test.data import DataDrivenTestCase, DataSuite from mypy.test.helpers import assert_string_arrays_equal, parse_options class ASTDiffSuite(DataSuite): files = ["diff.test"] def run_case(self, testcase: DataDrivenTestCase) -> None: first_src = "\n".join(testcase.input) files_dict = dict(testcase.files) second_src = files_dict["tmp/next.py"] options = parse_options(first_src, testcase, 1) if options.python_version > sys.version_info: pytest.skip("Test case requires a newer Python version") messages1, files1 = self.build(first_src, options) messages2, files2 = self.build(second_src, options) a = [] if messages1: a.extend(messages1) if messages2: a.append("== next ==") a.extend(messages2) assert ( files1 is not None and files2 is not None ), "cases where CompileError occurred should not be run" prefix = "__main__" snapshot1 = snapshot_symbol_table(prefix, files1["__main__"].names) snapshot2 = snapshot_symbol_table(prefix, files2["__main__"].names) diff = compare_symbol_table_snapshots(prefix, snapshot1, snapshot2) for trigger in sorted(diff): a.append(trigger) assert_string_arrays_equal( testcase.output, a, f"Invalid output ({testcase.file}, line {testcase.line})" ) def build(self, source: str, options: Options) -> tuple[list[str], dict[str, MypyFile] | None]: options.use_builtins_fixtures = True options.show_traceback = True options.cache_dir = os.devnull options.allow_empty_bodies = True try: result = build.build( sources=[BuildSource("main", None, source)], options=options, alt_lib_path=test_temp_dir, ) except CompileError as e: # TODO: Is it okay to return None? return e.messages, None return result.errors, result.files
algorandfoundation/puya
src/puyapy/_vendor/mypy/test/testdiff.py
Python
NOASSERTION
2,510
"""Tests for mypy incremental error output.""" from __future__ import annotations from mypy import build from mypy.errors import CompileError from mypy.modulefinder import BuildSource from mypy.options import Options from mypy.test.data import DataDrivenTestCase, DataSuite from mypy.test.helpers import assert_string_arrays_equal class ErrorStreamSuite(DataSuite): required_out_section = True base_path = "." files = ["errorstream.test"] def run_case(self, testcase: DataDrivenTestCase) -> None: test_error_stream(testcase) def test_error_stream(testcase: DataDrivenTestCase) -> None: """Perform a single error streaming test case. The argument contains the description of the test case. """ options = Options() options.show_traceback = True options.hide_error_codes = True logged_messages: list[str] = [] def flush_errors(filename: str | None, msgs: list[str], serious: bool) -> None: if msgs: logged_messages.append("==== Errors flushed ====") logged_messages.extend(msgs) sources = [BuildSource("main", "__main__", "\n".join(testcase.input))] try: build.build(sources=sources, options=options, flush_errors=flush_errors) except CompileError as e: assert e.messages == [] assert_string_arrays_equal( testcase.output, logged_messages, f"Invalid output ({testcase.file}, line {testcase.line})" )
algorandfoundation/puya
src/puyapy/_vendor/mypy/test/testerrorstream.py
Python
NOASSERTION
1,441
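# The callback contract used above: build.build accepts a flush_errors
# callback and invokes it with (filename, messages, serious) as output
# becomes available, which is what lets errors stream out before the build
# finishes. A minimal sink mirroring the test:
from __future__ import annotations

collected: list[str] = []


def flush_errors(filename: str | None, msgs: list[str], serious: bool) -> None:
    collected.extend(msgs)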
"""Test cases for fine-grained incremental checking. Each test cases runs a batch build followed by one or more fine-grained incremental steps. We verify that each step produces the expected output. See the comment at the top of test-data/unit/fine-grained.test for more information. N.B.: Unlike most of the other test suites, testfinegrained does not rely on an alt_lib_path for finding source files. This means that they can test interactions with the lib_path that is built implicitly based on specified sources. """ from __future__ import annotations import os import re import sys import unittest from typing import Any import pytest from mypy import build from mypy.config_parser import parse_config_file from mypy.dmypy_server import Server from mypy.dmypy_util import DEFAULT_STATUS_FILE from mypy.errors import CompileError from mypy.find_sources import create_source_list from mypy.modulefinder import BuildSource from mypy.options import Options from mypy.server.mergecheck import check_consistency from mypy.server.update import sort_messages_preserving_file_order from mypy.test.config import test_temp_dir from mypy.test.data import DataDrivenTestCase, DataSuite, DeleteFile, UpdateFile from mypy.test.helpers import ( assert_module_equivalence, assert_string_arrays_equal, assert_target_equivalence, find_test_files, parse_options, perform_file_operations, ) # Set to True to perform (somewhat expensive) checks for duplicate AST nodes after merge CHECK_CONSISTENCY = False class FineGrainedSuite(DataSuite): files = find_test_files( pattern="fine-grained*.test", exclude=["fine-grained-cache-incremental.test"] ) # Whether to use the fine-grained cache in the testing. This is overridden # by a trivial subclass to produce a suite that uses the cache. use_cache = False def should_skip(self, testcase: DataDrivenTestCase) -> bool: # Decide whether to skip the test. This could have been structured # as a filter() classmethod also, but we want the tests reported # as skipped, not just elided. if self.use_cache: if testcase.only_when == "-only_when_nocache": return True # TODO: In caching mode we currently don't well support # starting from cached states with errors in them. 
if testcase.output and testcase.output[0] != "==": return True else: if testcase.only_when == "-only_when_cache": return True return False def run_case(self, testcase: DataDrivenTestCase) -> None: if self.should_skip(testcase): pytest.skip() return main_src = "\n".join(testcase.input) main_path = os.path.join(test_temp_dir, "main") with open(main_path, "w", encoding="utf8") as f: f.write(main_src) options = self.get_options(main_src, testcase, build_cache=False) if options.python_version > sys.version_info: pytest.skip("Test case requires a newer Python version") build_options = self.get_options(main_src, testcase, build_cache=True) server = Server(options, DEFAULT_STATUS_FILE) num_regular_incremental_steps = self.get_build_steps(main_src) step = 1 sources = self.parse_sources(main_src, step, options) if step <= num_regular_incremental_steps: messages = self.build(build_options, sources) else: messages = self.run_check(server, sources) a = [] if messages: a.extend(normalize_messages(messages)) assert testcase.tmpdir a.extend(self.maybe_suggest(step, server, main_src, testcase.tmpdir.name)) a.extend(self.maybe_inspect(step, server, main_src)) if server.fine_grained_manager: if CHECK_CONSISTENCY: check_consistency(server.fine_grained_manager) steps = testcase.find_steps() all_triggered = [] for operations in steps: step += 1 output, triggered = self.perform_step( operations, server, options, build_options, testcase, main_src, step, num_regular_incremental_steps, ) a.append("==") a.extend(output) all_triggered.extend(triggered) # Normalize paths in test output (for Windows). a = [line.replace("\\", "/") for line in a] assert_string_arrays_equal( testcase.output, a, f"Invalid output ({testcase.file}, line {testcase.line})" ) if testcase.triggered: assert_string_arrays_equal( testcase.triggered, self.format_triggered(all_triggered), f"Invalid active triggers ({testcase.file}, line {testcase.line})", ) def get_options(self, source: str, testcase: DataDrivenTestCase, build_cache: bool) -> Options: # This handles things like '# flags: --foo'. options = parse_options(source, testcase, incremental_step=1) options.incremental = True options.use_builtins_fixtures = True options.show_traceback = True options.error_summary = False options.fine_grained_incremental = not build_cache options.use_fine_grained_cache = self.use_cache and not build_cache options.cache_fine_grained = self.use_cache options.local_partial_types = True options.export_types = "inspect" in testcase.file # Treat empty bodies safely for these test cases. 
options.allow_empty_bodies = not testcase.name.endswith("_no_empty") if re.search("flags:.*--follow-imports", source) is None: # Override the default for follow_imports options.follow_imports = "error" for name, _ in testcase.files: if "mypy.ini" in name or "pyproject.toml" in name: parse_config_file(options, lambda: None, name) break return options def run_check(self, server: Server, sources: list[BuildSource]) -> list[str]: response = server.check(sources, export_types=False, is_tty=False, terminal_width=-1) out = response["out"] or response["err"] assert isinstance(out, str) return out.splitlines() def build(self, options: Options, sources: list[BuildSource]) -> list[str]: try: result = build.build(sources=sources, options=options) except CompileError as e: return e.messages return result.errors def format_triggered(self, triggered: list[list[str]]) -> list[str]: result = [] for n, triggers in enumerate(triggered): filtered = [trigger for trigger in triggers if not trigger.endswith("__>")] filtered = sorted(filtered) result.append(("%d: %s" % (n + 2, ", ".join(filtered))).strip()) return result def get_build_steps(self, program_text: str) -> int: """Get the number of regular incremental steps to run, from the test source""" if not self.use_cache: return 0 m = re.search("# num_build_steps: ([0-9]+)$", program_text, flags=re.MULTILINE) if m is not None: return int(m.group(1)) return 1 def perform_step( self, operations: list[UpdateFile | DeleteFile], server: Server, options: Options, build_options: Options, testcase: DataDrivenTestCase, main_src: str, step: int, num_regular_incremental_steps: int, ) -> tuple[list[str], list[list[str]]]: """Perform one fine-grained incremental build step (after some file updates/deletions). Return (mypy output, triggered targets). """ perform_file_operations(operations) sources = self.parse_sources(main_src, step, options) if step <= num_regular_incremental_steps: new_messages = self.build(build_options, sources) else: new_messages = self.run_check(server, sources) updated: list[str] = [] changed: list[str] = [] targets: list[str] = [] triggered = [] if server.fine_grained_manager: if CHECK_CONSISTENCY: check_consistency(server.fine_grained_manager) triggered.append(server.fine_grained_manager.triggered) updated = server.fine_grained_manager.updated_modules changed = [mod for mod, file in server.fine_grained_manager.changed_modules] targets = server.fine_grained_manager.processed_targets expected_stale = testcase.expected_stale_modules.get(step - 1) if expected_stale is not None: assert_module_equivalence("stale" + str(step - 1), expected_stale, changed) expected_rechecked = testcase.expected_rechecked_modules.get(step - 1) if expected_rechecked is not None: assert_module_equivalence("rechecked" + str(step - 1), expected_rechecked, updated) expected = testcase.expected_fine_grained_targets.get(step) if expected: assert_target_equivalence("targets" + str(step), expected, targets) new_messages = normalize_messages(new_messages) a = new_messages assert testcase.tmpdir a.extend(self.maybe_suggest(step, server, main_src, testcase.tmpdir.name)) a.extend(self.maybe_inspect(step, server, main_src)) return a, triggered def parse_sources( self, program_text: str, incremental_step: int, options: Options ) -> list[BuildSource]: """Return target BuildSources for a test case. Normally, the unit tests will check all files included in the test case. This differs from how testcheck works by default, as dmypy doesn't currently support following imports. 
You can override this behavior and instruct the tests to check multiple modules by using a comment like this in the test case input: # cmd: main a.py You can also use `# cmdN:` to have a different cmd for incremental step N (2, 3, ...). """ m = re.search("# cmd: mypy ([a-zA-Z0-9_./ ]+)$", program_text, flags=re.MULTILINE) regex = f"# cmd{incremental_step}: mypy ([a-zA-Z0-9_./ ]+)$" alt_m = re.search(regex, program_text, flags=re.MULTILINE) if alt_m is not None: # Optionally return a different command if in a later step # of incremental mode, otherwise default to reusing the # original cmd. m = alt_m if m: # The test case wants to use a non-default set of files. paths = [os.path.join(test_temp_dir, path) for path in m.group(1).strip().split()] return create_source_list(paths, options) else: base = BuildSource(os.path.join(test_temp_dir, "main"), "__main__", None) # Use expand_dir instead of create_source_list to avoid complaints # when there aren't any .py files in an increment return [base] + create_source_list([test_temp_dir], options, allow_empty_dir=True) def maybe_suggest(self, step: int, server: Server, src: str, tmp_dir: str) -> list[str]: output: list[str] = [] targets = self.get_suggest(src, step) for flags, target in targets: json = "--json" in flags callsites = "--callsites" in flags no_any = "--no-any" in flags no_errors = "--no-errors" in flags m = re.match("--flex-any=([0-9.]+)", flags) flex_any = float(m.group(1)) if m else None m = re.match(r"--use-fixme=(\w+)", flags) use_fixme = m.group(1) if m else None m = re.match("--max-guesses=([0-9]+)", flags) max_guesses = int(m.group(1)) if m else None res: dict[str, Any] = server.cmd_suggest( target.strip(), json=json, no_any=no_any, no_errors=no_errors, flex_any=flex_any, use_fixme=use_fixme, callsites=callsites, max_guesses=max_guesses, ) val = res["error"] if "error" in res else res["out"] + res["err"] if json: # JSON contains already escaped \ on Windows, so requires a bit of care. val = val.replace("\\\\", "\\") val = val.replace(os.path.realpath(tmp_dir) + os.path.sep, "") val = val.replace(os.path.abspath(tmp_dir) + os.path.sep, "") output.extend(val.strip().split("\n")) return normalize_messages(output) def maybe_inspect(self, step: int, server: Server, src: str) -> list[str]: output: list[str] = [] targets = self.get_inspect(src, step) for flags, location in targets: m = re.match(r"--show=(\w+)", flags) show = m.group(1) if m else "type" verbosity = 0 if "-v" in flags: verbosity = 1 if "-vv" in flags: verbosity = 2 m = re.match(r"--limit=([0-9]+)", flags) limit = int(m.group(1)) if m else 0 include_span = "--include-span" in flags include_kind = "--include-kind" in flags include_object_attrs = "--include-object-attrs" in flags union_attrs = "--union-attrs" in flags force_reload = "--force-reload" in flags res: dict[str, Any] = server.cmd_inspect( show, location, verbosity=verbosity, limit=limit, include_span=include_span, include_kind=include_kind, include_object_attrs=include_object_attrs, union_attrs=union_attrs, force_reload=force_reload, ) val = res["error"] if "error" in res else res["out"] + res["err"] output.extend(val.strip().split("\n")) return output def get_suggest(self, program_text: str, incremental_step: int) -> list[tuple[str, str]]: step_bit = "1?" 
if incremental_step == 1 else str(incremental_step) regex = f"# suggest{step_bit}: (--[a-zA-Z0-9_\\-./=?^ ]+ )*([a-zA-Z0-9_.:/?^ ]+)$" m = re.findall(regex, program_text, flags=re.MULTILINE) return m def get_inspect(self, program_text: str, incremental_step: int) -> list[tuple[str, str]]: step_bit = "1?" if incremental_step == 1 else str(incremental_step) regex = f"# inspect{step_bit}: (--[a-zA-Z0-9_\\-=?^ ]+ )*([a-zA-Z0-9_.:/?^ ]+)$" m = re.findall(regex, program_text, flags=re.MULTILINE) return m def normalize_messages(messages: list[str]) -> list[str]: return [re.sub("^tmp" + re.escape(os.sep), "", message) for message in messages] class TestMessageSorting(unittest.TestCase): def test_simple_sorting(self) -> None: msgs = ['x.py:1: error: "int" not callable', 'foo/y.py:123: note: "X" not defined'] old_msgs = ['foo/y.py:12: note: "Y" not defined', 'x.py:8: error: "str" not callable'] assert sort_messages_preserving_file_order(msgs, old_msgs) == list(reversed(msgs)) assert sort_messages_preserving_file_order(list(reversed(msgs)), old_msgs) == list( reversed(msgs) ) def test_long_form_sorting(self) -> None: # Multi-line errors should be sorted together and not split. msg1 = [ 'x.py:1: error: "int" not callable', "and message continues (x: y)", " 1()", " ^~~", ] msg2 = [ 'foo/y.py: In function "f":', 'foo/y.py:123: note: "X" not defined', "and again message continues", ] old_msgs = ['foo/y.py:12: note: "Y" not defined', 'x.py:8: error: "str" not callable'] assert sort_messages_preserving_file_order(msg1 + msg2, old_msgs) == msg2 + msg1 assert sort_messages_preserving_file_order(msg2 + msg1, old_msgs) == msg2 + msg1 def test_mypy_error_prefix(self) -> None: # Some errors don't have a file and start with "mypy: ". These # shouldn't be sorted together with file-specific errors. msg1 = 'x.py:1: error: "int" not callable' msg2 = 'foo/y:123: note: "X" not defined' msg3 = "mypy: Error not associated with a file" old_msgs = [ "mypy: Something wrong", 'foo/y:12: note: "Y" not defined', 'x.py:8: error: "str" not callable', ] assert sort_messages_preserving_file_order([msg1, msg2, msg3], old_msgs) == [ msg2, msg1, msg3, ] assert sort_messages_preserving_file_order([msg3, msg2, msg1], old_msgs) == [ msg2, msg1, msg3, ] def test_new_file_at_the_end(self) -> None: msg1 = 'x.py:1: error: "int" not callable' msg2 = 'foo/y.py:123: note: "X" not defined' new1 = "ab.py:3: error: Problem: error" new2 = "aaa:3: error: Bad" old_msgs = ['foo/y.py:12: note: "Y" not defined', 'x.py:8: error: "str" not callable'] assert sort_messages_preserving_file_order([msg1, msg2, new1], old_msgs) == [ msg2, msg1, new1, ] assert sort_messages_preserving_file_order([new1, msg1, msg2, new2], old_msgs) == [ msg2, msg1, new1, new2, ]
algorandfoundation/puya
src/puyapy/_vendor/mypy/test/testfinegrained.py
Python
NOASSERTION
17,781
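# format_triggered's numbering, mirrored standalone: fine-grained steps are
# numbered from 2 (step 1 is the initial batch build), and triggers ending
# in "__>" are dropped as noise. The trigger names below are hypothetical.
triggered = [["<m.g__>", "<m.f>"]]
result = []
for n, triggers in enumerate(triggered):
    filtered = sorted(t for t in triggers if not t.endswith("__>"))
    result.append(("%d: %s" % (n + 2, ", ".join(filtered))).strip())
assert result == ["2: <m.f>"]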
"""Tests for fine-grained incremental checking using the cache. All of the real code for this lives in testfinegrained.py. """ # We can't "import FineGrainedSuite from ..." because that will cause pytest # to collect the non-caching tests when running this file. from __future__ import annotations import mypy.test.testfinegrained class FineGrainedCacheSuite(mypy.test.testfinegrained.FineGrainedSuite): use_cache = True test_name_suffix = "_cached" files = mypy.test.testfinegrained.FineGrainedSuite.files + [ "fine-grained-cache-incremental.test" ]
algorandfoundation/puya
src/puyapy/_vendor/mypy/test/testfinegrainedcache.py
Python
NOASSERTION
580
from __future__ import annotations from unittest import TestCase, main from mypy.util import split_words, trim_source_line class FancyErrorFormattingTestCases(TestCase): def test_trim_source(self) -> None: assert trim_source_line("0123456789abcdef", max_len=16, col=5, min_width=2) == ( "0123456789abcdef", 0, ) # Locations near start. assert trim_source_line("0123456789abcdef", max_len=7, col=0, min_width=2) == ( "0123456...", 0, ) assert trim_source_line("0123456789abcdef", max_len=7, col=4, min_width=2) == ( "0123456...", 0, ) # Middle locations. assert trim_source_line("0123456789abcdef", max_len=7, col=5, min_width=2) == ( "...1234567...", -2, ) assert trim_source_line("0123456789abcdef", max_len=7, col=6, min_width=2) == ( "...2345678...", -1, ) assert trim_source_line("0123456789abcdef", max_len=7, col=8, min_width=2) == ( "...456789a...", 1, ) # Locations near the end. assert trim_source_line("0123456789abcdef", max_len=7, col=11, min_width=2) == ( "...789abcd...", 4, ) assert trim_source_line("0123456789abcdef", max_len=7, col=13, min_width=2) == ( "...9abcdef", 6, ) assert trim_source_line("0123456789abcdef", max_len=7, col=15, min_width=2) == ( "...9abcdef", 6, ) def test_split_words(self) -> None: assert split_words("Simple message") == ["Simple", "message"] assert split_words('Message with "Some[Long, Types]" in it') == [ "Message", "with", '"Some[Long, Types]"', "in", "it", ] assert split_words('Message with "Some[Long, Types]" and [error-code]') == [ "Message", "with", '"Some[Long, Types]"', "and", "[error-code]", ] assert split_words('"Type[Stands, First]" then words') == [ '"Type[Stands, First]"', "then", "words", ] assert split_words('First words "Then[Stands, Type]"') == [ "First", "words", '"Then[Stands, Type]"', ] assert split_words('"Type[Only, Here]"') == ['"Type[Only, Here]"'] assert split_words("OneWord") == ["OneWord"] assert split_words(" ") == ["", ""] if __name__ == "__main__": main()
algorandfoundation/puya
src/puyapy/_vendor/mypy/test/testformatter.py
Python
NOASSERTION
2,639
"""Unit tests for file system cache.""" from __future__ import annotations import os import shutil import sys import tempfile import unittest from mypy.fscache import FileSystemCache class TestFileSystemCache(unittest.TestCase): def setUp(self) -> None: self.tempdir = tempfile.mkdtemp() self.oldcwd = os.getcwd() os.chdir(self.tempdir) self.fscache = FileSystemCache() def tearDown(self) -> None: os.chdir(self.oldcwd) shutil.rmtree(self.tempdir) def test_isfile_case_1(self) -> None: self.make_file("bar.py") self.make_file("pkg/sub_package/__init__.py") self.make_file("pkg/sub_package/foo.py") # Run twice to test both cached and non-cached code paths. for i in range(2): assert self.isfile_case("bar.py") assert self.isfile_case("pkg/sub_package/__init__.py") assert self.isfile_case("pkg/sub_package/foo.py") assert not self.isfile_case("non_existent.py") assert not self.isfile_case("pkg/non_existent.py") assert not self.isfile_case("pkg/") assert not self.isfile_case("bar.py/") for i in range(2): assert not self.isfile_case("Bar.py") assert not self.isfile_case("pkg/sub_package/__init__.PY") assert not self.isfile_case("pkg/Sub_Package/foo.py") assert not self.isfile_case("Pkg/sub_package/foo.py") def test_isfile_case_2(self) -> None: self.make_file("bar.py") self.make_file("pkg/sub_package/__init__.py") self.make_file("pkg/sub_package/foo.py") # Run twice to test both cached and non-cached code paths. # This reverses the order of checks from test_isfile_case_1. for i in range(2): assert not self.isfile_case("Bar.py") assert not self.isfile_case("pkg/sub_package/__init__.PY") assert not self.isfile_case("pkg/Sub_Package/foo.py") assert not self.isfile_case("Pkg/sub_package/foo.py") for i in range(2): assert self.isfile_case("bar.py") assert self.isfile_case("pkg/sub_package/__init__.py") assert self.isfile_case("pkg/sub_package/foo.py") assert not self.isfile_case("non_existent.py") assert not self.isfile_case("pkg/non_existent.py") def test_isfile_case_3(self) -> None: self.make_file("bar.py") self.make_file("pkg/sub_package/__init__.py") self.make_file("pkg/sub_package/foo.py") # Run twice to test both cached and non-cached code paths. for i in range(2): assert self.isfile_case("bar.py") assert not self.isfile_case("non_existent.py") assert not self.isfile_case("pkg/non_existent.py") assert not self.isfile_case("Bar.py") assert not self.isfile_case("pkg/sub_package/__init__.PY") assert not self.isfile_case("pkg/Sub_Package/foo.py") assert not self.isfile_case("Pkg/sub_package/foo.py") assert self.isfile_case("pkg/sub_package/__init__.py") assert self.isfile_case("pkg/sub_package/foo.py") def test_isfile_case_other_directory(self) -> None: self.make_file("bar.py") with tempfile.TemporaryDirectory() as other: self.make_file("other_dir.py", base=other) self.make_file("pkg/other_dir.py", base=other) assert self.isfile_case(os.path.join(other, "other_dir.py")) assert not self.isfile_case(os.path.join(other, "Other_Dir.py")) assert not self.isfile_case(os.path.join(other, "bar.py")) if sys.platform in ("win32", "darwin"): # We only check case for directories under our prefix, and since # this path is not under the prefix, case difference is fine. 
assert self.isfile_case(os.path.join(other, "PKG/other_dir.py")) def make_file(self, path: str, base: str | None = None) -> None: if base is None: base = self.tempdir fullpath = os.path.join(base, path) os.makedirs(os.path.dirname(fullpath), exist_ok=True) if not path.endswith("/"): with open(fullpath, "w") as f: f.write("# test file") def isfile_case(self, path: str) -> bool: return self.fscache.isfile_case(os.path.join(self.tempdir, path), self.tempdir)
algorandfoundation/puya
src/puyapy/_vendor/mypy/test/testfscache.py
Python
NOASSERTION
4,456
"""Test cases for graph processing code in build.py.""" from __future__ import annotations import sys from typing import AbstractSet from mypy.build import BuildManager, BuildSourceSet, State, order_ascc, sorted_components from mypy.errors import Errors from mypy.fscache import FileSystemCache from mypy.graph_utils import strongly_connected_components, topsort from mypy.modulefinder import SearchPaths from mypy.options import Options from mypy.plugin import Plugin from mypy.report import Reports from mypy.test.helpers import Suite, assert_equal from mypy.version import __version__ class GraphSuite(Suite): def test_topsort(self) -> None: a = frozenset({"A"}) b = frozenset({"B"}) c = frozenset({"C"}) d = frozenset({"D"}) data: dict[AbstractSet[str], set[AbstractSet[str]]] = {a: {b, c}, b: {d}, c: {d}} res = list(topsort(data)) assert_equal(res, [{d}, {b, c}, {a}]) def test_scc(self) -> None: vertices = {"A", "B", "C", "D"} edges: dict[str, list[str]] = {"A": ["B", "C"], "B": ["C"], "C": ["B", "D"], "D": []} sccs = {frozenset(x) for x in strongly_connected_components(vertices, edges)} assert_equal(sccs, {frozenset({"A"}), frozenset({"B", "C"}), frozenset({"D"})}) def _make_manager(self) -> BuildManager: options = Options() options.use_builtins_fixtures = True errors = Errors(options) fscache = FileSystemCache() search_paths = SearchPaths((), (), (), ()) manager = BuildManager( data_dir="", search_paths=search_paths, ignore_prefix="", source_set=BuildSourceSet([]), reports=Reports("", {}), options=options, version_id=__version__, plugin=Plugin(options), plugins_snapshot={}, errors=errors, flush_errors=lambda filename, msgs, serious: None, fscache=fscache, stdout=sys.stdout, stderr=sys.stderr, ) return manager def test_sorted_components(self) -> None: manager = self._make_manager() graph = { "a": State("a", None, "import b, c", manager), "d": State("d", None, "pass", manager), "b": State("b", None, "import c", manager), "c": State("c", None, "import b, d", manager), } res = sorted_components(graph) assert_equal(res, [frozenset({"d"}), frozenset({"c", "b"}), frozenset({"a"})]) def test_order_ascc(self) -> None: manager = self._make_manager() graph = { "a": State("a", None, "import b, c", manager), "d": State("d", None, "def f(): import a", manager), "b": State("b", None, "import c", manager), "c": State("c", None, "import b, d", manager), } res = sorted_components(graph) assert_equal(res, [frozenset({"a", "d", "c", "b"})]) ascc = res[0] scc = order_ascc(graph, ascc) assert_equal(scc, ["d", "c", "b", "a"])
algorandfoundation/puya
src/puyapy/_vendor/mypy/test/testgraph.py
Python
NOASSERTION
3,094
"""Test cases for type inference helper functions.""" from __future__ import annotations from mypy.argmap import map_actuals_to_formals from mypy.checker import DisjointDict, group_comparison_operands from mypy.literals import Key from mypy.nodes import ARG_NAMED, ARG_OPT, ARG_POS, ARG_STAR, ARG_STAR2, ArgKind, NameExpr from mypy.test.helpers import Suite, assert_equal from mypy.test.typefixture import TypeFixture from mypy.types import AnyType, TupleType, Type, TypeOfAny class MapActualsToFormalsSuite(Suite): """Test cases for argmap.map_actuals_to_formals.""" def test_basic(self) -> None: self.assert_map([], [], []) def test_positional_only(self) -> None: self.assert_map([ARG_POS], [ARG_POS], [[0]]) self.assert_map([ARG_POS, ARG_POS], [ARG_POS, ARG_POS], [[0], [1]]) def test_optional(self) -> None: self.assert_map([], [ARG_OPT], [[]]) self.assert_map([ARG_POS], [ARG_OPT], [[0]]) self.assert_map([ARG_POS], [ARG_OPT, ARG_OPT], [[0], []]) def test_callee_star(self) -> None: self.assert_map([], [ARG_STAR], [[]]) self.assert_map([ARG_POS], [ARG_STAR], [[0]]) self.assert_map([ARG_POS, ARG_POS], [ARG_STAR], [[0, 1]]) def test_caller_star(self) -> None: self.assert_map([ARG_STAR], [ARG_STAR], [[0]]) self.assert_map([ARG_POS, ARG_STAR], [ARG_STAR], [[0, 1]]) self.assert_map([ARG_STAR], [ARG_POS, ARG_STAR], [[0], [0]]) self.assert_map([ARG_STAR], [ARG_OPT, ARG_STAR], [[0], [0]]) def test_too_many_caller_args(self) -> None: self.assert_map([ARG_POS], [], []) self.assert_map([ARG_STAR], [], []) self.assert_map([ARG_STAR], [ARG_POS], [[0]]) def test_tuple_star(self) -> None: any_type = AnyType(TypeOfAny.special_form) self.assert_vararg_map([ARG_STAR], [ARG_POS], [[0]], self.make_tuple(any_type)) self.assert_vararg_map( [ARG_STAR], [ARG_POS, ARG_POS], [[0], [0]], self.make_tuple(any_type, any_type) ) self.assert_vararg_map( [ARG_STAR], [ARG_POS, ARG_OPT, ARG_OPT], [[0], [0], []], self.make_tuple(any_type, any_type), ) def make_tuple(self, *args: Type) -> TupleType: return TupleType(list(args), TypeFixture().std_tuple) def test_named_args(self) -> None: self.assert_map(["x"], [(ARG_POS, "x")], [[0]]) self.assert_map(["y", "x"], [(ARG_POS, "x"), (ARG_POS, "y")], [[1], [0]]) def test_some_named_args(self) -> None: self.assert_map(["y"], [(ARG_OPT, "x"), (ARG_OPT, "y"), (ARG_OPT, "z")], [[], [0], []]) def test_missing_named_arg(self) -> None: self.assert_map(["y"], [(ARG_OPT, "x")], [[]]) def test_duplicate_named_arg(self) -> None: self.assert_map(["x", "x"], [(ARG_OPT, "x")], [[0, 1]]) def test_varargs_and_bare_asterisk(self) -> None: self.assert_map([ARG_STAR], [ARG_STAR, (ARG_NAMED, "x")], [[0], []]) self.assert_map([ARG_STAR, "x"], [ARG_STAR, (ARG_NAMED, "x")], [[0], [1]]) def test_keyword_varargs(self) -> None: self.assert_map(["x"], [ARG_STAR2], [[0]]) self.assert_map(["x", ARG_STAR2], [ARG_STAR2], [[0, 1]]) self.assert_map(["x", ARG_STAR2], [(ARG_POS, "x"), ARG_STAR2], [[0], [1]]) self.assert_map([ARG_POS, ARG_STAR2], [(ARG_POS, "x"), ARG_STAR2], [[0], [1]]) def test_both_kinds_of_varargs(self) -> None: self.assert_map([ARG_STAR, ARG_STAR2], [(ARG_POS, "x"), (ARG_POS, "y")], [[0, 1], [0, 1]]) def test_special_cases(self) -> None: self.assert_map([ARG_STAR], [ARG_STAR, ARG_STAR2], [[0], []]) self.assert_map([ARG_STAR, ARG_STAR2], [ARG_STAR, ARG_STAR2], [[0], [1]]) self.assert_map([ARG_STAR2], [(ARG_POS, "x"), ARG_STAR2], [[0], [0]]) self.assert_map([ARG_STAR2], [ARG_STAR2], [[0]]) def assert_map( self, caller_kinds_: list[ArgKind | str], callee_kinds_: list[ArgKind | tuple[ArgKind, str]], expected: 
list[list[int]], ) -> None: caller_kinds, caller_names = expand_caller_kinds(caller_kinds_) callee_kinds, callee_names = expand_callee_kinds(callee_kinds_) result = map_actuals_to_formals( caller_kinds, caller_names, callee_kinds, callee_names, lambda i: AnyType(TypeOfAny.special_form), ) assert_equal(result, expected) def assert_vararg_map( self, caller_kinds: list[ArgKind], callee_kinds: list[ArgKind], expected: list[list[int]], vararg_type: Type, ) -> None: result = map_actuals_to_formals(caller_kinds, [], callee_kinds, [], lambda i: vararg_type) assert_equal(result, expected) def expand_caller_kinds( kinds_or_names: list[ArgKind | str], ) -> tuple[list[ArgKind], list[str | None]]: kinds = [] names: list[str | None] = [] for k in kinds_or_names: if isinstance(k, str): kinds.append(ARG_NAMED) names.append(k) else: kinds.append(k) names.append(None) return kinds, names def expand_callee_kinds( kinds_and_names: list[ArgKind | tuple[ArgKind, str]] ) -> tuple[list[ArgKind], list[str | None]]: kinds = [] names: list[str | None] = [] for v in kinds_and_names: if isinstance(v, tuple): kinds.append(v[0]) names.append(v[1]) else: kinds.append(v) names.append(None) return kinds, names class OperandDisjointDictSuite(Suite): """Test cases for checker.DisjointDict, which is used for type inference with operands.""" def new(self) -> DisjointDict[int, str]: return DisjointDict() def test_independent_maps(self) -> None: d = self.new() d.add_mapping({0, 1}, {"group1"}) d.add_mapping({2, 3, 4}, {"group2"}) d.add_mapping({5, 6, 7}, {"group3"}) self.assertEqual( d.items(), [({0, 1}, {"group1"}), ({2, 3, 4}, {"group2"}), ({5, 6, 7}, {"group3"})] ) def test_partial_merging(self) -> None: d = self.new() d.add_mapping({0, 1}, {"group1"}) d.add_mapping({1, 2}, {"group2"}) d.add_mapping({3, 4}, {"group3"}) d.add_mapping({5, 0}, {"group4"}) d.add_mapping({5, 6}, {"group5"}) d.add_mapping({4, 7}, {"group6"}) self.assertEqual( d.items(), [ ({0, 1, 2, 5, 6}, {"group1", "group2", "group4", "group5"}), ({3, 4, 7}, {"group3", "group6"}), ], ) def test_full_merging(self) -> None: d = self.new() d.add_mapping({0, 1, 2}, {"a"}) d.add_mapping({3, 4, 2}, {"b"}) d.add_mapping({10, 11, 12}, {"c"}) d.add_mapping({13, 14, 15}, {"d"}) d.add_mapping({14, 10, 16}, {"e"}) d.add_mapping({0, 10}, {"f"}) self.assertEqual( d.items(), [({0, 1, 2, 3, 4, 10, 11, 12, 13, 14, 15, 16}, {"a", "b", "c", "d", "e", "f"})], ) def test_merge_with_multiple_overlaps(self) -> None: d = self.new() d.add_mapping({0, 1, 2}, {"a"}) d.add_mapping({3, 4, 5}, {"b"}) d.add_mapping({1, 2, 4, 5}, {"c"}) d.add_mapping({6, 1, 2, 4, 5}, {"d"}) d.add_mapping({6, 1, 2, 4, 5}, {"e"}) self.assertEqual(d.items(), [({0, 1, 2, 3, 4, 5, 6}, {"a", "b", "c", "d", "e"})]) class OperandComparisonGroupingSuite(Suite): """Test cases for checker.group_comparison_operands.""" def literal_keymap(self, assignable_operands: dict[int, NameExpr]) -> dict[int, Key]: output: dict[int, Key] = {} for index, expr in assignable_operands.items(): output[index] = ("FakeExpr", expr.name) return output def test_basic_cases(self) -> None: # Note: the grouping function doesn't actually inspect the input exprs, so we # just default to using NameExprs for simplicity. 
x0 = NameExpr("x0") x1 = NameExpr("x1") x2 = NameExpr("x2") x3 = NameExpr("x3") x4 = NameExpr("x4") basic_input = [("==", x0, x1), ("==", x1, x2), ("<", x2, x3), ("==", x3, x4)] none_assignable = self.literal_keymap({}) all_assignable = self.literal_keymap({0: x0, 1: x1, 2: x2, 3: x3, 4: x4}) for assignable in [none_assignable, all_assignable]: self.assertEqual( group_comparison_operands(basic_input, assignable, set()), [("==", [0, 1]), ("==", [1, 2]), ("<", [2, 3]), ("==", [3, 4])], ) self.assertEqual( group_comparison_operands(basic_input, assignable, {"=="}), [("==", [0, 1, 2]), ("<", [2, 3]), ("==", [3, 4])], ) self.assertEqual( group_comparison_operands(basic_input, assignable, {"<"}), [("==", [0, 1]), ("==", [1, 2]), ("<", [2, 3]), ("==", [3, 4])], ) self.assertEqual( group_comparison_operands(basic_input, assignable, {"==", "<"}), [("==", [0, 1, 2]), ("<", [2, 3]), ("==", [3, 4])], ) def test_multiple_groups(self) -> None: x0 = NameExpr("x0") x1 = NameExpr("x1") x2 = NameExpr("x2") x3 = NameExpr("x3") x4 = NameExpr("x4") x5 = NameExpr("x5") self.assertEqual( group_comparison_operands( [("==", x0, x1), ("==", x1, x2), ("is", x2, x3), ("is", x3, x4)], self.literal_keymap({}), {"==", "is"}, ), [("==", [0, 1, 2]), ("is", [2, 3, 4])], ) self.assertEqual( group_comparison_operands( [("==", x0, x1), ("==", x1, x2), ("==", x2, x3), ("==", x3, x4)], self.literal_keymap({}), {"==", "is"}, ), [("==", [0, 1, 2, 3, 4])], ) self.assertEqual( group_comparison_operands( [("is", x0, x1), ("==", x1, x2), ("==", x2, x3), ("==", x3, x4)], self.literal_keymap({}), {"==", "is"}, ), [("is", [0, 1]), ("==", [1, 2, 3, 4])], ) self.assertEqual( group_comparison_operands( [("is", x0, x1), ("is", x1, x2), ("<", x2, x3), ("==", x3, x4), ("==", x4, x5)], self.literal_keymap({}), {"==", "is"}, ), [("is", [0, 1, 2]), ("<", [2, 3]), ("==", [3, 4, 5])], ) def test_multiple_groups_coalescing(self) -> None: x0 = NameExpr("x0") x1 = NameExpr("x1") x2 = NameExpr("x2") x3 = NameExpr("x3") x4 = NameExpr("x4") nothing_combined = [("==", [0, 1, 2]), ("<", [2, 3]), ("==", [3, 4, 5])] everything_combined = [("==", [0, 1, 2, 3, 4, 5]), ("<", [2, 3])] # Note: We do 'x4 == x0' at the very end! 
        two_groups = [
            ("==", x0, x1),
            ("==", x1, x2),
            ("<", x2, x3),
            ("==", x3, x4),
            ("==", x4, x0),
        ]
        self.assertEqual(
            group_comparison_operands(
                two_groups, self.literal_keymap({0: x0, 1: x1, 2: x2, 3: x3, 4: x4, 5: x0}), {"=="}
            ),
            everything_combined,
            "All vars are assignable, everything is combined",
        )
        self.assertEqual(
            group_comparison_operands(
                two_groups, self.literal_keymap({1: x1, 2: x2, 3: x3, 4: x4}), {"=="}
            ),
            nothing_combined,
            "x0 is unassignable, so no combining",
        )
        self.assertEqual(
            group_comparison_operands(
                two_groups, self.literal_keymap({0: x0, 1: x1, 3: x3, 5: x0}), {"=="}
            ),
            everything_combined,
            "Some vars are unassignable but x0 is assignable, so we combine",
        )
        self.assertEqual(
            group_comparison_operands(two_groups, self.literal_keymap({0: x0, 5: x0}), {"=="}),
            everything_combined,
            "All vars other than x0 are unassignable, so we still combine",
        )

    def test_multiple_groups_different_operators(self) -> None:
        x0 = NameExpr("x0")
        x1 = NameExpr("x1")
        x2 = NameExpr("x2")
        x3 = NameExpr("x3")
        groups = [("==", x0, x1), ("==", x1, x2), ("is", x2, x3), ("is", x3, x0)]
        keymap = self.literal_keymap({0: x0, 1: x1, 2: x2, 3: x3, 4: x0})
        self.assertEqual(
            group_comparison_operands(groups, keymap, {"==", "is"}),
            [("==", [0, 1, 2]), ("is", [2, 3, 4])],
            "Different operators can never be combined",
        )

    def test_single_pair(self) -> None:
        x0 = NameExpr("x0")
        x1 = NameExpr("x1")
        single_comparison = [("==", x0, x1)]
        expected_output = [("==", [0, 1])]
        assignable_combinations: list[dict[int, NameExpr]] = [{}, {0: x0}, {1: x1}, {0: x0, 1: x1}]
        to_group_by: list[set[str]] = [set(), {"=="}, {"is"}]
        for combo in assignable_combinations:
            for operators in to_group_by:
                keymap = self.literal_keymap(combo)
                self.assertEqual(
                    group_comparison_operands(single_comparison, keymap, operators),
                    expected_output,
                )

    def test_empty_pair_list(self) -> None:
        # This case should never occur in practice -- ComparisonExprs
        # always contain at least one comparison.
        self.assertEqual(group_comparison_operands([], {}, set()), [])
        self.assertEqual(group_comparison_operands([], {}, {"=="}), [])
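

# DisjointDict on its own, reduced from the suite above: add_mapping() unions
# any key sets that overlap and merges their value sets, which is what the
# grouping tests rely on. Only the API already exercised above is used; the
# helper name is illustrative.
def _demo_disjoint_dict() -> None:
    d: DisjointDict[int, str] = DisjointDict()
    d.add_mapping({0, 1}, {"a"})
    d.add_mapping({1, 2}, {"b"})  # overlaps on key 1, so the two groups merge
    assert d.items() == [({0, 1, 2}, {"a", "b"})]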
algorandfoundation/puya
src/puyapy/_vendor/mypy/test/testinfer.py
Python
NOASSERTION
13,856
from __future__ import annotations import sys import time from multiprocessing import Queue, get_context from unittest import TestCase, main import pytest from mypy.ipc import IPCClient, IPCServer CONNECTION_NAME = "dmypy-test-ipc" def server(msg: str, q: Queue[str]) -> None: server = IPCServer(CONNECTION_NAME) q.put(server.connection_name) data = "" while not data: with server: server.write(msg) data = server.read() server.cleanup() def server_multi_message_echo(q: Queue[str]) -> None: server = IPCServer(CONNECTION_NAME) q.put(server.connection_name) data = "" with server: while data != "quit": data = server.read() server.write(data) server.cleanup() class IPCTests(TestCase): def setUp(self) -> None: if sys.platform == "linux": # The default "fork" start method is potentially unsafe self.ctx = get_context("forkserver") else: self.ctx = get_context("spawn") def test_transaction_large(self) -> None: queue: Queue[str] = self.ctx.Queue() msg = "t" * 200000 # longer than the max read size of 100_000 p = self.ctx.Process(target=server, args=(msg, queue), daemon=True) p.start() connection_name = queue.get() with IPCClient(connection_name, timeout=1) as client: assert client.read() == msg client.write("test") queue.close() queue.join_thread() p.join() def test_connect_twice(self) -> None: queue: Queue[str] = self.ctx.Queue() msg = "this is a test message" p = self.ctx.Process(target=server, args=(msg, queue), daemon=True) p.start() connection_name = queue.get() with IPCClient(connection_name, timeout=1) as client: assert client.read() == msg client.write("") # don't let the server hang up yet, we want to connect again. with IPCClient(connection_name, timeout=1) as client: assert client.read() == msg client.write("test") queue.close() queue.join_thread() p.join() assert p.exitcode == 0 def test_multiple_messages(self) -> None: queue: Queue[str] = self.ctx.Queue() p = self.ctx.Process(target=server_multi_message_echo, args=(queue,), daemon=True) p.start() connection_name = queue.get() with IPCClient(connection_name, timeout=1) as client: # "foo bar" with extra accents on letters. # In UTF-8 encoding so we don't confuse editors opening this file. fancy_text = b"f\xcc\xb6o\xcc\xb2\xf0\x9d\x91\x9c \xd0\xb2\xe2\xb7\xa1a\xcc\xb6r\xcc\x93\xcd\x98\xcd\x8c" client.write(fancy_text.decode("utf-8")) assert client.read() == fancy_text.decode("utf-8") client.write("Test with spaces") client.write("Test write before reading previous") time.sleep(0) # yield to the server to force reading of all messages by server. assert client.read() == "Test with spaces" assert client.read() == "Test write before reading previous" client.write("quit") assert client.read() == "quit" queue.close() queue.join_thread() p.join() assert p.exitcode == 0 # Run test_connect_twice a lot, in the hopes of finding issues. # This is really slow, so it is skipped, but can be enabled if # needed to debug IPC issues. @pytest.mark.skip def test_connect_alot(self) -> None: t0 = time.time() for i in range(1000): try: print(i, "start") self.test_connect_twice() finally: t1 = time.time() print(i, t1 - t0) sys.stdout.flush() t0 = t1 if __name__ == "__main__": main()
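

# The client half of the round trip above, in isolation: IPCClient is a context
# manager whose read()/write() exchange strings with an IPCServer. Run against
# an echo server such as server_multi_message_echo above; the function name is
# illustrative.
def _demo_echo_once(connection_name: str, message: str) -> str:
    with IPCClient(connection_name, timeout=1) as client:
        client.write(message)
        return client.read()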
algorandfoundation/puya
src/puyapy/_vendor/mypy/test/testipc.py
Python
NOASSERTION
3,966
"""Test cases for AST merge (used for fine-grained incremental checking)""" from __future__ import annotations import os import shutil from mypy import build from mypy.build import BuildResult from mypy.errors import CompileError from mypy.modulefinder import BuildSource from mypy.nodes import ( UNBOUND_IMPORTED, Expression, MypyFile, Node, SymbolTable, SymbolTableNode, TypeInfo, TypeVarExpr, Var, ) from mypy.options import Options from mypy.server.subexpr import get_subexpressions from mypy.server.update import FineGrainedBuildManager from mypy.strconv import StrConv from mypy.test.config import test_temp_dir from mypy.test.data import DataDrivenTestCase, DataSuite from mypy.test.helpers import assert_string_arrays_equal, normalize_error_messages, parse_options from mypy.types import Type, TypeStrVisitor from mypy.util import IdMapper, short_type # Which data structures to dump in a test case? SYMTABLE = "SYMTABLE" TYPEINFO = " TYPEINFO" TYPES = "TYPES" AST = "AST" class ASTMergeSuite(DataSuite): files = ["merge.test"] def setup(self) -> None: super().setup() self.str_conv = StrConv(show_ids=True, options=Options()) assert self.str_conv.id_mapper is not None self.id_mapper: IdMapper = self.str_conv.id_mapper self.type_str_conv = TypeStrVisitor(self.id_mapper, options=Options()) def run_case(self, testcase: DataDrivenTestCase) -> None: name = testcase.name # We use the test case name to decide which data structures to dump. # Dumping everything would result in very verbose test cases. if name.endswith("_symtable"): kind = SYMTABLE elif name.endswith("_typeinfo"): kind = TYPEINFO elif name.endswith("_types"): kind = TYPES else: kind = AST main_src = "\n".join(testcase.input) result = self.build(main_src, testcase) assert result is not None, "cases where CompileError occurred should not be run" result.manager.fscache.flush() fine_grained_manager = FineGrainedBuildManager(result) a = [] if result.errors: a.extend(result.errors) target_path = os.path.join(test_temp_dir, "target.py") shutil.copy(os.path.join(test_temp_dir, "target.py.next"), target_path) a.extend(self.dump(fine_grained_manager, kind, testcase.test_modules)) old_subexpr = get_subexpressions(result.manager.modules["target"]) a.append("==>") new_file, new_types = self.build_increment(fine_grained_manager, "target", target_path) a.extend(self.dump(fine_grained_manager, kind, testcase.test_modules)) for expr in old_subexpr: if isinstance(expr, TypeVarExpr): # These are merged so we can't perform the check. continue # Verify that old AST nodes are removed from the expression type map. assert expr not in new_types if testcase.normalize_output: a = normalize_error_messages(a) assert_string_arrays_equal( testcase.output, a, f"Invalid output ({testcase.file}, line {testcase.line})" ) def build(self, source: str, testcase: DataDrivenTestCase) -> BuildResult | None: options = parse_options(source, testcase, incremental_step=1) options.incremental = True options.fine_grained_incremental = True options.use_builtins_fixtures = True options.export_types = True options.show_traceback = True options.allow_empty_bodies = True options.force_uppercase_builtins = True main_path = os.path.join(test_temp_dir, "main") self.str_conv.options = options self.type_str_conv.options = options with open(main_path, "w", encoding="utf8") as f: f.write(source) try: result = build.build( sources=[BuildSource(main_path, None, None)], options=options, alt_lib_path=test_temp_dir, ) except CompileError: # TODO: Is it okay to return None? 
return None return result def build_increment( self, manager: FineGrainedBuildManager, module_id: str, path: str ) -> tuple[MypyFile, dict[Expression, Type]]: manager.flush_cache() manager.update([(module_id, path)], []) module = manager.manager.modules[module_id] type_map = manager.graph[module_id].type_map() return module, type_map def dump( self, manager: FineGrainedBuildManager, kind: str, test_modules: list[str] ) -> list[str]: modules = { name: file for name, file in manager.manager.modules.items() if name in test_modules } if kind == AST: return self.dump_asts(modules) elif kind == TYPEINFO: return self.dump_typeinfos(modules) elif kind == SYMTABLE: return self.dump_symbol_tables(modules) elif kind == TYPES: return self.dump_types(modules, manager) assert False, f"Invalid kind {kind}" def dump_asts(self, modules: dict[str, MypyFile]) -> list[str]: a = [] for m in sorted(modules): s = modules[m].accept(self.str_conv) a.extend(s.splitlines()) return a def dump_symbol_tables(self, modules: dict[str, MypyFile]) -> list[str]: a = [] for id in sorted(modules): a.extend(self.dump_symbol_table(id, modules[id].names)) return a def dump_symbol_table(self, module_id: str, symtable: SymbolTable) -> list[str]: a = [f"{module_id}:"] for name in sorted(symtable): if name.startswith("__"): continue a.append(f" {name}: {self.format_symbol_table_node(symtable[name])}") return a def format_symbol_table_node(self, node: SymbolTableNode) -> str: if node.node is None: if node.kind == UNBOUND_IMPORTED: return "UNBOUND_IMPORTED" return "None" if isinstance(node.node, Node): s = f"{str(type(node.node).__name__)}<{self.id_mapper.id(node.node)}>" else: s = f"? ({type(node.node)})" if ( isinstance(node.node, Var) and node.node.type and not node.node.fullname.startswith("typing.") ): typestr = self.format_type(node.node.type) s += f"({typestr})" return s def dump_typeinfos(self, modules: dict[str, MypyFile]) -> list[str]: a = [] for id in sorted(modules): a.extend(self.dump_typeinfos_recursive(modules[id].names)) return a def dump_typeinfos_recursive(self, names: SymbolTable) -> list[str]: a = [] for name, node in sorted(names.items(), key=lambda x: x[0]): if isinstance(node.node, TypeInfo): a.extend(self.dump_typeinfo(node.node)) a.extend(self.dump_typeinfos_recursive(node.node.names)) return a def dump_typeinfo(self, info: TypeInfo) -> list[str]: if info.fullname == "enum.Enum": # Avoid noise return [] s = info.dump(str_conv=self.str_conv, type_str_conv=self.type_str_conv) return s.splitlines() def dump_types( self, modules: dict[str, MypyFile], manager: FineGrainedBuildManager ) -> list[str]: a = [] # To make the results repeatable, we try to generate unique and # deterministic sort keys. for module_id in sorted(modules): all_types = manager.manager.all_types # Compute a module type map from the global type map tree = manager.graph[module_id].tree assert tree is not None type_map = { node: all_types[node] for node in get_subexpressions(tree) if node in all_types } if type_map: a.append(f"## {module_id}") for expr in sorted( type_map, key=lambda n: ( n.line, short_type(n), n.str_with_options(self.str_conv.options) + str(type_map[n]), ), ): typ = type_map[expr] a.append(f"{short_type(expr)}:{expr.line}: {self.format_type(typ)}") return a def format_type(self, typ: Type) -> str: return typ.accept(self.type_str_conv)
algorandfoundation/puya
src/puyapy/_vendor/mypy/test/testmerge.py
Python
NOASSERTION
8,661
from __future__ import annotations

import os

from mypy.modulefinder import FindModuleCache, ModuleNotFoundReason, SearchPaths
from mypy.options import Options
from mypy.test.config import package_path
from mypy.test.helpers import Suite, assert_equal

data_path = os.path.relpath(os.path.join(package_path, "modulefinder"))


class ModuleFinderSuite(Suite):
    def setUp(self) -> None:
        self.search_paths = SearchPaths(
            python_path=(),
            mypy_path=(
                os.path.join(data_path, "nsx-pkg1"),
                os.path.join(data_path, "nsx-pkg2"),
                os.path.join(data_path, "nsx-pkg3"),
                os.path.join(data_path, "nsy-pkg1"),
                os.path.join(data_path, "nsy-pkg2"),
                os.path.join(data_path, "pkg1"),
                os.path.join(data_path, "pkg2"),
            ),
            package_path=(),
            typeshed_path=(),
        )
        options = Options()
        options.namespace_packages = True
        self.fmc_ns = FindModuleCache(self.search_paths, fscache=None, options=options)

        options = Options()
        options.namespace_packages = False
        self.fmc_nons = FindModuleCache(self.search_paths, fscache=None, options=options)

    def test__no_namespace_packages__nsx(self) -> None:
        """
        If namespace_packages is False, we shouldn't find nsx.
        """
        found_module = self.fmc_nons.find_module("nsx")
        assert_equal(ModuleNotFoundReason.NOT_FOUND, found_module)

    def test__no_namespace_packages__nsx_a(self) -> None:
        """
        If namespace_packages is False, we shouldn't find nsx.a.
        """
        found_module = self.fmc_nons.find_module("nsx.a")
        assert_equal(ModuleNotFoundReason.NOT_FOUND, found_module)

    def test__no_namespace_packages__find_a_in_pkg1(self) -> None:
        """
        Find pkg1/a.py for "a" with namespace_packages False.
        """
        found_module = self.fmc_nons.find_module("a")
        expected = os.path.join(data_path, "pkg1", "a.py")
        assert_equal(expected, found_module)

    def test__no_namespace_packages__find_b_in_pkg2(self) -> None:
        found_module = self.fmc_nons.find_module("b")
        expected = os.path.join(data_path, "pkg2", "b", "__init__.py")
        assert_equal(expected, found_module)

    def test__find_nsx_as_namespace_pkg_in_pkg1(self) -> None:
        """
        There's no __init__.py in any of the nsx dirs, so return the path to
        the first one found in mypypath.
        """
        found_module = self.fmc_ns.find_module("nsx")
        expected = os.path.join(data_path, "nsx-pkg1", "nsx")
        assert_equal(expected, found_module)

    def test__find_nsx_a_init_in_pkg1(self) -> None:
        """
        Find nsx-pkg1/nsx/a/__init__.py for "nsx.a" in namespace mode.
        """
        found_module = self.fmc_ns.find_module("nsx.a")
        expected = os.path.join(data_path, "nsx-pkg1", "nsx", "a", "__init__.py")
        assert_equal(expected, found_module)

    def test__find_nsx_b_init_in_pkg2(self) -> None:
        """
        Find nsx-pkg2/nsx/b/__init__.py for "nsx.b" in namespace mode.
        """
        found_module = self.fmc_ns.find_module("nsx.b")
        expected = os.path.join(data_path, "nsx-pkg2", "nsx", "b", "__init__.py")
        assert_equal(expected, found_module)

    def test__find_nsx_c_c_in_pkg3(self) -> None:
        """
        Find nsx-pkg3/nsx/c/c.py for "nsx.c.c" in namespace mode.
        """
        found_module = self.fmc_ns.find_module("nsx.c.c")
        expected = os.path.join(data_path, "nsx-pkg3", "nsx", "c", "c.py")
        assert_equal(expected, found_module)

    def test__find_nsy_a__init_pyi(self) -> None:
        """
        Prefer nsy-pkg1/a/__init__.pyi file over __init__.py.
        """
        found_module = self.fmc_ns.find_module("nsy.a")
        expected = os.path.join(data_path, "nsy-pkg1", "nsy", "a", "__init__.pyi")
        assert_equal(expected, found_module)

    def test__find_nsy_b__init_py(self) -> None:
        """
        There is a nsy-pkg2/nsy/b.pyi, but also a nsy-pkg2/nsy/b/__init__.py.
        We expect to find the latter when looking up "nsy.b" as a package
        is preferred over a module.
""" found_module = self.fmc_ns.find_module("nsy.b") expected = os.path.join(data_path, "nsy-pkg2", "nsy", "b", "__init__.py") assert_equal(expected, found_module) def test__find_nsy_c_pyi(self) -> None: """ There is a nsy-pkg2/nsy/c.pyi and nsy-pkg2/nsy/c.py We expect to find the former when looking up "nsy.b" as .pyi is preferred over .py. """ found_module = self.fmc_ns.find_module("nsy.c") expected = os.path.join(data_path, "nsy-pkg2", "nsy", "c.pyi") assert_equal(expected, found_module) def test__find_a_in_pkg1(self) -> None: found_module = self.fmc_ns.find_module("a") expected = os.path.join(data_path, "pkg1", "a.py") assert_equal(expected, found_module) def test__find_b_init_in_pkg2(self) -> None: found_module = self.fmc_ns.find_module("b") expected = os.path.join(data_path, "pkg2", "b", "__init__.py") assert_equal(expected, found_module) def test__find_d_nowhere(self) -> None: found_module = self.fmc_ns.find_module("d") assert_equal(ModuleNotFoundReason.NOT_FOUND, found_module) class ModuleFinderSitePackagesSuite(Suite): def setUp(self) -> None: self.package_dir = os.path.relpath( os.path.join(package_path, "modulefinder-site-packages") ) package_paths = ( os.path.join(self.package_dir, "baz"), os.path.join(self.package_dir, "..", "not-a-directory"), os.path.join(self.package_dir, "..", "modulefinder-src"), self.package_dir, ) self.search_paths = SearchPaths( python_path=(), mypy_path=(os.path.join(data_path, "pkg1"),), package_path=tuple(package_paths), typeshed_path=(), ) options = Options() options.namespace_packages = True self.fmc_ns = FindModuleCache(self.search_paths, fscache=None, options=options) options = Options() options.namespace_packages = False self.fmc_nons = FindModuleCache(self.search_paths, fscache=None, options=options) def path(self, *parts: str) -> str: return os.path.join(self.package_dir, *parts) def test__packages_with_ns(self) -> None: cases = [ # Namespace package with py.typed ("ns_pkg_typed", self.path("ns_pkg_typed")), ("ns_pkg_typed.a", self.path("ns_pkg_typed", "a.py")), ("ns_pkg_typed.b", self.path("ns_pkg_typed", "b")), ("ns_pkg_typed.b.c", self.path("ns_pkg_typed", "b", "c.py")), ("ns_pkg_typed.a.a_var", ModuleNotFoundReason.NOT_FOUND), # Namespace package without py.typed ("ns_pkg_untyped", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), ("ns_pkg_untyped.a", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), ("ns_pkg_untyped.b", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), ("ns_pkg_untyped.b.c", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), ("ns_pkg_untyped.a.a_var", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), # Namespace package without stub package ("ns_pkg_w_stubs", self.path("ns_pkg_w_stubs")), ("ns_pkg_w_stubs.typed", self.path("ns_pkg_w_stubs-stubs", "typed", "__init__.pyi")), ( "ns_pkg_w_stubs.typed_inline", self.path("ns_pkg_w_stubs", "typed_inline", "__init__.py"), ), ("ns_pkg_w_stubs.untyped", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), # Regular package with py.typed ("pkg_typed", self.path("pkg_typed", "__init__.py")), ("pkg_typed.a", self.path("pkg_typed", "a.py")), ("pkg_typed.b", self.path("pkg_typed", "b", "__init__.py")), ("pkg_typed.b.c", self.path("pkg_typed", "b", "c.py")), ("pkg_typed.a.a_var", ModuleNotFoundReason.NOT_FOUND), # Regular package without py.typed ("pkg_untyped", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), ("pkg_untyped.a", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), ("pkg_untyped.b", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), ("pkg_untyped.b.c", 
ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), ("pkg_untyped.a.a_var", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), # Top-level Python file in site-packages ("standalone", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), ("standalone.standalone_var", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), # Packages found by following .pth files ("baz_pkg", self.path("baz", "baz_pkg", "__init__.py")), ("ns_baz_pkg.a", self.path("baz", "ns_baz_pkg", "a.py")), ("neighbor_pkg", self.path("..", "modulefinder-src", "neighbor_pkg", "__init__.py")), ("ns_neighbor_pkg.a", self.path("..", "modulefinder-src", "ns_neighbor_pkg", "a.py")), # Something that doesn't exist ("does_not_exist", ModuleNotFoundReason.NOT_FOUND), # A regular package with an installed set of stubs ("foo.bar", self.path("foo-stubs", "bar.pyi")), # A regular, non-site-packages module ("a", os.path.join(data_path, "pkg1", "a.py")), ] for module, expected in cases: template = "Find(" + module + ") got {}; expected {}" actual = self.fmc_ns.find_module(module) assert_equal(actual, expected, template) def test__packages_without_ns(self) -> None: cases = [ # Namespace package with py.typed ("ns_pkg_typed", ModuleNotFoundReason.NOT_FOUND), ("ns_pkg_typed.a", ModuleNotFoundReason.NOT_FOUND), ("ns_pkg_typed.b", ModuleNotFoundReason.NOT_FOUND), ("ns_pkg_typed.b.c", ModuleNotFoundReason.NOT_FOUND), ("ns_pkg_typed.a.a_var", ModuleNotFoundReason.NOT_FOUND), # Namespace package without py.typed ("ns_pkg_untyped", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), ("ns_pkg_untyped.a", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), ("ns_pkg_untyped.b", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), ("ns_pkg_untyped.b.c", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), ("ns_pkg_untyped.a.a_var", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), # Namespace package without stub package ("ns_pkg_w_stubs", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), ("ns_pkg_w_stubs.typed", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), ( "ns_pkg_w_stubs.typed_inline", self.path("ns_pkg_w_stubs", "typed_inline", "__init__.py"), ), ("ns_pkg_w_stubs.untyped", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), # Regular package with py.typed ("pkg_typed", self.path("pkg_typed", "__init__.py")), ("pkg_typed.a", self.path("pkg_typed", "a.py")), ("pkg_typed.b", self.path("pkg_typed", "b", "__init__.py")), ("pkg_typed.b.c", self.path("pkg_typed", "b", "c.py")), ("pkg_typed.a.a_var", ModuleNotFoundReason.NOT_FOUND), # Regular package without py.typed ("pkg_untyped", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), ("pkg_untyped.a", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), ("pkg_untyped.b", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), ("pkg_untyped.b.c", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), ("pkg_untyped.a.a_var", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), # Top-level Python file in site-packages ("standalone", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), ("standalone.standalone_var", ModuleNotFoundReason.FOUND_WITHOUT_TYPE_HINTS), # Packages found by following .pth files ("baz_pkg", self.path("baz", "baz_pkg", "__init__.py")), ("ns_baz_pkg.a", ModuleNotFoundReason.NOT_FOUND), ("neighbor_pkg", self.path("..", "modulefinder-src", "neighbor_pkg", "__init__.py")), ("ns_neighbor_pkg.a", ModuleNotFoundReason.NOT_FOUND), # Something that doesn't exist ("does_not_exist", ModuleNotFoundReason.NOT_FOUND), # A regular package with an installed set of stubs ("foo.bar", self.path("foo-stubs", "bar.pyi")), # A regular, non-site-packages module ("a", 
os.path.join(data_path, "pkg1", "a.py")), ] for module, expected in cases: template = "Find(" + module + ") got {}; expected {}" actual = self.fmc_nons.find_module(module) assert_equal(actual, expected, template)
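

# The lookup pattern used throughout both suites, condensed into one helper:
# find_module() returns either a path string or a ModuleNotFoundReason member.
# Only APIs already shown above are used; the helper name is illustrative.
def _demo_find_module(module: str, mypy_path: tuple[str, ...]) -> str | None:
    options = Options()
    options.namespace_packages = True
    search_paths = SearchPaths(
        python_path=(), mypy_path=mypy_path, package_path=(), typeshed_path=()
    )
    fmc = FindModuleCache(search_paths, fscache=None, options=options)
    result = fmc.find_module(module)
    return result if isinstance(result, str) else None  # a ModuleNotFoundReason otherwise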
algorandfoundation/puya
src/puyapy/_vendor/mypy/test/testmodulefinder.py
Python
NOASSERTION
13,177
"""A basic check to make sure that we are using a mypyc-compiled version when expected.""" from __future__ import annotations import os from unittest import TestCase import mypy class MypycTest(TestCase): def test_using_mypyc(self) -> None: if os.getenv("TEST_MYPYC", None) == "1": assert not mypy.__file__.endswith(".py"), "Expected to find a mypyc-compiled version"
algorandfoundation/puya
src/puyapy/_vendor/mypy/test/testmypyc.py
Python
NOASSERTION
397
"""Test cases for `--output=json`. These cannot be run by the usual unit test runner because of the backslashes in the output, which get normalized to forward slashes by the test suite on Windows. """ from __future__ import annotations import os import os.path from mypy import api from mypy.defaults import PYTHON3_VERSION from mypy.test.config import test_temp_dir from mypy.test.data import DataDrivenTestCase, DataSuite class OutputJSONsuite(DataSuite): files = ["outputjson.test"] def run_case(self, testcase: DataDrivenTestCase) -> None: test_output_json(testcase) def test_output_json(testcase: DataDrivenTestCase) -> None: """Runs Mypy in a subprocess, and ensures that `--output=json` works as intended.""" mypy_cmdline = ["--output=json"] mypy_cmdline.append(f"--python-version={'.'.join(map(str, PYTHON3_VERSION))}") # Write the program to a file. program_path = os.path.join(test_temp_dir, "main") mypy_cmdline.append(program_path) with open(program_path, "w", encoding="utf8") as file: for s in testcase.input: file.write(f"{s}\n") output = [] # Type check the program. out, err, returncode = api.run(mypy_cmdline) # split lines, remove newlines, and remove directory of test case for line in (out + err).rstrip("\n").splitlines(): if line.startswith(test_temp_dir + os.sep): output.append(line[len(test_temp_dir + os.sep) :].rstrip("\r\n")) else: output.append(line.rstrip("\r\n")) if returncode > 1: output.append("!!! Mypy crashed !!!") # Remove temp file. os.remove(program_path) # JSON encodes every `\` character into `\\`, so we need to remove `\\` from windows paths # and `/` from POSIX paths json_os_separator = os.sep.replace("\\", "\\\\") normalized_output = [line.replace(test_temp_dir + json_os_separator, "") for line in output] assert normalized_output == testcase.output
algorandfoundation/puya
src/puyapy/_vendor/mypy/test/testoutput.py
Python
NOASSERTION
1,980
"""Tests for the mypy parser.""" from __future__ import annotations import sys from pytest import skip from mypy import defaults from mypy.config_parser import parse_mypy_comments from mypy.errors import CompileError, Errors from mypy.options import Options from mypy.parse import parse from mypy.test.data import DataDrivenTestCase, DataSuite from mypy.test.helpers import assert_string_arrays_equal, find_test_files, parse_options from mypy.util import get_mypy_comments class ParserSuite(DataSuite): required_out_section = True base_path = "." files = find_test_files(pattern="parse*.test", exclude=["parse-errors.test"]) if sys.version_info < (3, 10): files.remove("parse-python310.test") if sys.version_info < (3, 12): files.remove("parse-python312.test") def run_case(self, testcase: DataDrivenTestCase) -> None: test_parser(testcase) def test_parser(testcase: DataDrivenTestCase) -> None: """Perform a single parser test case. The argument contains the description of the test case. """ options = Options() options.force_uppercase_builtins = True options.hide_error_codes = True if testcase.file.endswith("python310.test"): options.python_version = (3, 10) elif testcase.file.endswith("python312.test"): options.python_version = (3, 12) else: options.python_version = defaults.PYTHON3_VERSION source = "\n".join(testcase.input) # Apply mypy: comments to options. comments = get_mypy_comments(source) changes, _ = parse_mypy_comments(comments, options) options = options.apply_changes(changes) try: n = parse( bytes(source, "ascii"), fnam="main", module="__main__", errors=Errors(options), options=options, raise_on_error=True, ) a = n.str_with_options(options).split("\n") except CompileError as e: a = e.messages assert_string_arrays_equal( testcase.output, a, f"Invalid parser output ({testcase.file}, line {testcase.line})" ) # The file name shown in test case output. This is displayed in error # messages, and must match the file name in the test case descriptions. INPUT_FILE_NAME = "file" class ParseErrorSuite(DataSuite): required_out_section = True base_path = "." files = ["parse-errors.test"] def run_case(self, testcase: DataDrivenTestCase) -> None: test_parse_error(testcase) def test_parse_error(testcase: DataDrivenTestCase) -> None: try: options = parse_options("\n".join(testcase.input), testcase, 0) if options.python_version != sys.version_info[:2]: skip() # Compile temporary file. The test file contains non-ASCII characters. parse( bytes("\n".join(testcase.input), "utf-8"), INPUT_FILE_NAME, "__main__", errors=Errors(options), options=options, raise_on_error=True, ) raise AssertionError("No errors reported") except CompileError as e: if e.module_with_blocker is not None: assert e.module_with_blocker == "__main__" # Verify that there was a compile error and that the error messages # are equivalent. assert_string_arrays_equal( testcase.output, e.messages, f"Invalid compiler output ({testcase.file}, line {testcase.line})", )
algorandfoundation/puya
src/puyapy/_vendor/mypy/test/testparse.py
Python
NOASSERTION
3,490
from __future__ import annotations

import os
import re
import subprocess
import sys
import tempfile
from contextlib import contextmanager
from typing import Iterator

import filelock

import mypy.api
from mypy.test.config import package_path, pip_lock, pip_timeout, test_temp_dir
from mypy.test.data import DataDrivenTestCase, DataSuite
from mypy.test.helpers import assert_string_arrays_equal, perform_file_operations

# NOTE: options.use_builtins_fixtures should not be set in these
# tests, otherwise mypy will ignore installed third-party packages.


class PEP561Suite(DataSuite):
    files = ["pep561.test"]
    base_path = "."

    def run_case(self, test_case: DataDrivenTestCase) -> None:
        test_pep561(test_case)


@contextmanager
def virtualenv(python_executable: str = sys.executable) -> Iterator[tuple[str, str]]:
    """Context manager that creates a virtualenv in a temporary directory.

    Yields the directory of the created virtualenv and the path to its
    Python executable.
    """
    with tempfile.TemporaryDirectory() as venv_dir:
        proc = subprocess.run(
            [python_executable, "-m", "venv", venv_dir], cwd=os.getcwd(), capture_output=True
        )
        if proc.returncode != 0:
            err = proc.stdout.decode("utf-8") + proc.stderr.decode("utf-8")
            raise Exception("Failed to create venv.\n" + err)
        if sys.platform == "win32":
            yield venv_dir, os.path.abspath(os.path.join(venv_dir, "Scripts", "python"))
        else:
            yield venv_dir, os.path.abspath(os.path.join(venv_dir, "bin", "python"))


def upgrade_pip(python_executable: str) -> None:
    """Install pip>=21.3.1. Required for editable installs with PEP 660."""
    if (
        sys.version_info >= (3, 11)
        or (3, 10, 3) <= sys.version_info < (3, 11)
        or (3, 9, 11) <= sys.version_info < (3, 10)
        or (3, 8, 13) <= sys.version_info < (3, 9)
    ):
        # Skip for more recent Python releases which come with pip>=21.3.1
        # out of the box - for performance reasons.
return install_cmd = [python_executable, "-m", "pip", "install", "pip>=21.3.1"] try: with filelock.FileLock(pip_lock, timeout=pip_timeout): proc = subprocess.run(install_cmd, capture_output=True, env=os.environ) except filelock.Timeout as err: raise Exception(f"Failed to acquire {pip_lock}") from err if proc.returncode != 0: raise Exception(proc.stdout.decode("utf-8") + proc.stderr.decode("utf-8")) def install_package( pkg: str, python_executable: str = sys.executable, editable: bool = False ) -> None: """Install a package from test-data/packages/pkg/""" working_dir = os.path.join(package_path, pkg) with tempfile.TemporaryDirectory() as dir: install_cmd = [python_executable, "-m", "pip", "install"] if editable: install_cmd.append("-e") install_cmd.append(".") # Note that newer versions of pip (21.3+) don't # follow this env variable, but this is for compatibility env = {"PIP_BUILD": dir} # Inherit environment for Windows env.update(os.environ) try: with filelock.FileLock(pip_lock, timeout=pip_timeout): proc = subprocess.run(install_cmd, cwd=working_dir, capture_output=True, env=env) except filelock.Timeout as err: raise Exception(f"Failed to acquire {pip_lock}") from err if proc.returncode != 0: raise Exception(proc.stdout.decode("utf-8") + proc.stderr.decode("utf-8")) def test_pep561(testcase: DataDrivenTestCase) -> None: """Test running mypy on files that depend on PEP 561 packages.""" assert testcase.old_cwd is not None, "test was not properly set up" python = sys.executable assert python is not None, "Should be impossible" pkgs, pip_args = parse_pkgs(testcase.input[0]) mypy_args = parse_mypy_args(testcase.input[1]) editable = False for arg in pip_args: if arg == "editable": editable = True else: raise ValueError(f"Unknown pip argument: {arg}") assert pkgs, "No packages to install for PEP 561 test?" with virtualenv(python) as venv: venv_dir, python_executable = venv if editable: # Editable installs with PEP 660 require pip>=21.3 upgrade_pip(python_executable) for pkg in pkgs: install_package(pkg, python_executable, editable) cmd_line = list(mypy_args) has_program = not ("-p" in cmd_line or "--package" in cmd_line) if has_program: program = testcase.name + ".py" with open(program, "w", encoding="utf-8") as f: for s in testcase.input: f.write(f"{s}\n") cmd_line.append(program) cmd_line.extend(["--no-error-summary", "--hide-error-codes"]) if python_executable != sys.executable: cmd_line.append(f"--python-executable={python_executable}") steps = testcase.find_steps() if steps != [[]]: steps = [[]] + steps for i, operations in enumerate(steps): perform_file_operations(operations) output = [] # Type check the module out, err, returncode = mypy.api.run(cmd_line) # split lines, remove newlines, and remove directory of test case for line in (out + err).splitlines(): if line.startswith(test_temp_dir + os.sep): output.append(line[len(test_temp_dir + os.sep) :].rstrip("\r\n")) else: # Normalize paths so that the output is the same on Windows and Linux/macOS. 
                    line = line.replace(test_temp_dir + os.sep, test_temp_dir + "/")
                    output.append(line.rstrip("\r\n"))
            iter_count = "" if i == 0 else f" on iteration {i + 1}"
            expected = testcase.output if i == 0 else testcase.output2.get(i + 1, [])

            assert_string_arrays_equal(
                expected,
                output,
                f"Invalid output ({testcase.file}, line {testcase.line}){iter_count}",
            )

        if has_program:
            os.remove(program)


def parse_pkgs(comment: str) -> tuple[list[str], list[str]]:
    if not comment.startswith("# pkgs:"):
        return ([], [])
    else:
        pkgs_str, *args = comment[7:].split(";")
        return ([pkg.strip() for pkg in pkgs_str.split(",")], [arg.strip() for arg in args])


def parse_mypy_args(line: str) -> list[str]:
    m = re.match("# flags: (.*)$", line)
    if not m:
        return []  # No args; mypy will spit out an error.
    return m.group(1).split()


def test_mypy_path_is_respected() -> None:
    # Note: this test is disabled; the assert below fails immediately, so the
    # body that follows never runs.
    assert False
    packages = "packages"
    pkg_name = "a"
    with tempfile.TemporaryDirectory() as temp_dir:
        old_dir = os.getcwd()
        os.chdir(temp_dir)
        try:
            # Create the pkg for files to go into
            full_pkg_name = os.path.join(temp_dir, packages, pkg_name)
            os.makedirs(full_pkg_name)

            # Create the empty __init__ file to declare a package
            pkg_init_name = os.path.join(temp_dir, packages, pkg_name, "__init__.py")
            open(pkg_init_name, "w", encoding="utf8").close()

            mypy_config_path = os.path.join(temp_dir, "mypy.ini")
            with open(mypy_config_path, "w") as mypy_file:
                mypy_file.write("[mypy]\n")
                mypy_file.write(f"mypy_path = ./{packages}\n")

            with virtualenv() as venv:
                venv_dir, python_executable = venv

                cmd_line_args = []
                if python_executable != sys.executable:
                    cmd_line_args.append(f"--python-executable={python_executable}")
                cmd_line_args.extend(["--config-file", mypy_config_path, "--package", pkg_name])

                out, err, returncode = mypy.api.run(cmd_line_args)
                assert returncode == 0
        finally:
            os.chdir(old_dir)
algorandfoundation/puya
src/puyapy/_vendor/mypy/test/testpep561.py
Python
NOASSERTION
8,064
"""Test cases for running mypy programs using a Python interpreter. Each test case type checks a program then runs it using Python. The output (stdout) of the program is compared to expected output. Type checking uses full builtins and other stubs. Note: Currently Python interpreter paths are hard coded. Note: These test cases are *not* included in the main test suite, as including this suite would slow down the main suite too much. """ from __future__ import annotations import os import os.path import re import subprocess import sys from tempfile import TemporaryDirectory from mypy import api from mypy.defaults import PYTHON3_VERSION from mypy.test.config import test_temp_dir from mypy.test.data import DataDrivenTestCase, DataSuite from mypy.test.helpers import assert_string_arrays_equal, split_lines # Path to Python 3 interpreter python3_path = sys.executable program_re = re.compile(r"\b_program.py\b") class PythonEvaluationSuite(DataSuite): files = ["pythoneval.test", "pythoneval-asyncio.test"] cache_dir = TemporaryDirectory() def run_case(self, testcase: DataDrivenTestCase) -> None: test_python_evaluation(testcase, os.path.join(self.cache_dir.name, ".mypy_cache")) def test_python_evaluation(testcase: DataDrivenTestCase, cache_dir: str) -> None: """Runs Mypy in a subprocess. If this passes without errors, executes the script again with a given Python version. """ assert testcase.old_cwd is not None, "test was not properly set up" # We must enable site packages to get access to installed stubs. mypy_cmdline = [ "--show-traceback", "--no-silence-site-packages", "--no-error-summary", "--hide-error-codes", "--allow-empty-bodies", "--force-uppercase-builtins", "--test-env", # Speeds up some checks ] interpreter = python3_path mypy_cmdline.append(f"--python-version={'.'.join(map(str, PYTHON3_VERSION))}") m = re.search("# flags: (.*)$", "\n".join(testcase.input), re.MULTILINE) if m: additional_flags = m.group(1).split() for flag in additional_flags: if flag.startswith("--python-version="): targetted_python_version = flag.split("=")[1] targetted_major, targetted_minor = targetted_python_version.split(".") if (int(targetted_major), int(targetted_minor)) > ( sys.version_info.major, sys.version_info.minor, ): return mypy_cmdline.extend(additional_flags) # Write the program to a file. program = "_" + testcase.name + ".py" program_path = os.path.join(test_temp_dir, program) mypy_cmdline.append(program_path) with open(program_path, "w", encoding="utf8") as file: for s in testcase.input: file.write(f"{s}\n") mypy_cmdline.append(f"--cache-dir={cache_dir}") output = [] # Type check the program. out, err, returncode = api.run(mypy_cmdline) # split lines, remove newlines, and remove directory of test case for line in (out + err).splitlines(): if line.startswith(test_temp_dir + os.sep): output.append(line[len(test_temp_dir + os.sep) :].rstrip("\r\n")) else: # Normalize paths so that the output is the same on Windows and Linux/macOS. line = line.replace(test_temp_dir + os.sep, test_temp_dir + "/") output.append(line.rstrip("\r\n")) if returncode > 1 and not testcase.output: # Either api.run() doesn't work well in case of a crash, or pytest interferes with it. # Tweak output to prevent tests with empty expected output to pass in case of a crash. output.append("!!! Mypy crashed !!!") if returncode == 0 and not output: # Execute the program. 
proc = subprocess.run( [interpreter, "-Wignore", program], cwd=test_temp_dir, capture_output=True ) output.extend(split_lines(proc.stdout, proc.stderr)) # Remove temp file. os.remove(program_path) for i, line in enumerate(output): if os.path.sep + "typeshed" + os.path.sep in line: output[i] = line.split(os.path.sep)[-1] assert_string_arrays_equal( adapt_output(testcase), output, f"Invalid output ({testcase.file}, line {testcase.line})" ) def adapt_output(testcase: DataDrivenTestCase) -> list[str]: """Translates the generic _program.py into the actual filename.""" program = "_" + testcase.name + ".py" return [program_re.sub(program, line) for line in testcase.output]
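

# The core call these evaluation tests build on: mypy.api.run() type checks
# in-process and returns captured stdout, stderr, and the exit status. The
# file name below is a placeholder; the helper name is illustrative.
def _demo_api_run() -> int:
    stdout, stderr, exit_status = api.run(["--no-error-summary", "some_program.py"])
    print(stdout, end="")
    return exit_status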
algorandfoundation/puya
src/puyapy/_vendor/mypy/test/testpythoneval.py
Python
NOASSERTION
4,630
"""Test cases for reports generated by mypy.""" from __future__ import annotations import textwrap from mypy.report import CoberturaPackage, get_line_rate from mypy.test.helpers import Suite, assert_equal try: import lxml # type: ignore[import-untyped] except ImportError: lxml = None import pytest class CoberturaReportSuite(Suite): @pytest.mark.skipif(lxml is None, reason="Cannot import lxml. Is it installed?") def test_get_line_rate(self) -> None: assert_equal("1.0", get_line_rate(0, 0)) assert_equal("0.3333", get_line_rate(1, 3)) @pytest.mark.skipif(lxml is None, reason="Cannot import lxml. Is it installed?") def test_as_xml(self) -> None: import lxml.etree as etree # type: ignore[import-untyped] cobertura_package = CoberturaPackage("foobar") cobertura_package.covered_lines = 21 cobertura_package.total_lines = 42 child_package = CoberturaPackage("raz") child_package.covered_lines = 10 child_package.total_lines = 10 child_package.classes["class"] = etree.Element("class") cobertura_package.packages["raz"] = child_package expected_output = textwrap.dedent( """\ <package complexity="1.0" name="foobar" branch-rate="0" line-rate="0.5000"> <classes/> <packages> <package complexity="1.0" name="raz" branch-rate="0" line-rate="1.0000"> <classes> <class/> </classes> </package> </packages> </package> """ ).encode("ascii") assert_equal( expected_output, etree.tostring(cobertura_package.as_xml(), pretty_print=True) )
algorandfoundation/puya
src/puyapy/_vendor/mypy/test/testreports.py
Python
NOASSERTION
1,773
"""Semantic analyzer test cases""" from __future__ import annotations import sys from typing import Dict from mypy import build from mypy.defaults import PYTHON3_VERSION from mypy.errors import CompileError from mypy.modulefinder import BuildSource from mypy.nodes import TypeInfo from mypy.options import Options from mypy.test.config import test_temp_dir from mypy.test.data import DataDrivenTestCase, DataSuite from mypy.test.helpers import ( assert_string_arrays_equal, find_test_files, normalize_error_messages, parse_options, testfile_pyversion, ) # Semantic analyzer test cases: dump parse tree # Semantic analysis test case description files. semanal_files = find_test_files( pattern="semanal-*.test", exclude=[ "semanal-errors-python310.test", "semanal-errors.test", "semanal-typeinfo.test", "semanal-symtable.test", ], ) if sys.version_info < (3, 10): semanal_files.remove("semanal-python310.test") def get_semanal_options(program_text: str, testcase: DataDrivenTestCase) -> Options: options = parse_options(program_text, testcase, 1) options.use_builtins_fixtures = True options.semantic_analysis_only = True options.show_traceback = True options.python_version = PYTHON3_VERSION options.force_uppercase_builtins = True return options class SemAnalSuite(DataSuite): files = semanal_files native_sep = True def run_case(self, testcase: DataDrivenTestCase) -> None: test_semanal(testcase) def test_semanal(testcase: DataDrivenTestCase) -> None: """Perform a semantic analysis test case. The testcase argument contains a description of the test case (inputs and output). """ try: src = "\n".join(testcase.input) options = get_semanal_options(src, testcase) options.python_version = testfile_pyversion(testcase.file) result = build.build( sources=[BuildSource("main", None, src)], options=options, alt_lib_path=test_temp_dir ) a = result.errors if a: raise CompileError(a) # Include string representations of the source files in the actual # output. for module in sorted(result.files.keys()): if module in testcase.test_modules: a += result.files[module].str_with_options(options).split("\n") except CompileError as e: a = e.messages if testcase.normalize_output: a = normalize_error_messages(a) assert_string_arrays_equal( testcase.output, a, f"Invalid semantic analyzer output ({testcase.file}, line {testcase.line})", ) # Semantic analyzer error test cases class SemAnalErrorSuite(DataSuite): files = ["semanal-errors.test"] if sys.version_info >= (3, 10): semanal_files.append("semanal-errors-python310.test") def run_case(self, testcase: DataDrivenTestCase) -> None: test_semanal_error(testcase) def test_semanal_error(testcase: DataDrivenTestCase) -> None: """Perform a test case.""" try: src = "\n".join(testcase.input) res = build.build( sources=[BuildSource("main", None, src)], options=get_semanal_options(src, testcase), alt_lib_path=test_temp_dir, ) a = res.errors except CompileError as e: # Verify that there was a compile error and that the error messages # are equivalent. a = e.messages if testcase.normalize_output: a = normalize_error_messages(a) assert_string_arrays_equal( testcase.output, a, f"Invalid compiler output ({testcase.file}, line {testcase.line})" ) # SymbolNode table export test cases class SemAnalSymtableSuite(DataSuite): required_out_section = True files = ["semanal-symtable.test"] def run_case(self, testcase: DataDrivenTestCase) -> None: """Perform a test case.""" try: # Build test case input. 
src = "\n".join(testcase.input) result = build.build( sources=[BuildSource("main", None, src)], options=get_semanal_options(src, testcase), alt_lib_path=test_temp_dir, ) # The output is the symbol table converted into a string. a = result.errors if a: raise CompileError(a) for module in sorted(result.files.keys()): if module in testcase.test_modules: a.append(f"{module}:") for s in str(result.files[module].names).split("\n"): a.append(" " + s) except CompileError as e: a = e.messages assert_string_arrays_equal( testcase.output, a, f"Invalid semantic analyzer output ({testcase.file}, line {testcase.line})", ) # Type info export test cases class SemAnalTypeInfoSuite(DataSuite): required_out_section = True files = ["semanal-typeinfo.test"] def run_case(self, testcase: DataDrivenTestCase) -> None: """Perform a test case.""" try: # Build test case input. src = "\n".join(testcase.input) result = build.build( sources=[BuildSource("main", None, src)], options=get_semanal_options(src, testcase), alt_lib_path=test_temp_dir, ) a = result.errors if a: raise CompileError(a) # Collect all TypeInfos in top-level modules. typeinfos = TypeInfoMap() for module, file in result.files.items(): if module in testcase.test_modules: for n in file.names.values(): if isinstance(n.node, TypeInfo): assert n.fullname if any(n.fullname.startswith(m + ".") for m in testcase.test_modules): typeinfos[n.fullname] = n.node # The output is the symbol table converted into a string. a = str(typeinfos).split("\n") except CompileError as e: a = e.messages assert_string_arrays_equal( testcase.output, a, f"Invalid semantic analyzer output ({testcase.file}, line {testcase.line})", ) class TypeInfoMap(Dict[str, TypeInfo]): def __str__(self) -> str: a: list[str] = ["TypeInfoMap("] for x, y in sorted(self.items()): ti = ("\n" + " ").join(str(y).split("\n")) a.append(f" {x} : {ti}") a[-1] += ")" return "\n".join(a)
algorandfoundation/puya
src/puyapy/_vendor/mypy/test/testsemanal.py
Python
NOASSERTION
6,711
"""Test cases for the constraint solver used in type inference.""" from __future__ import annotations from mypy.constraints import SUBTYPE_OF, SUPERTYPE_OF, Constraint from mypy.solve import Bounds, Graph, solve_constraints, transitive_closure from mypy.test.helpers import Suite, assert_equal from mypy.test.typefixture import TypeFixture from mypy.types import Type, TypeVarId, TypeVarLikeType, TypeVarType class SolveSuite(Suite): def setUp(self) -> None: self.fx = TypeFixture() def test_empty_input(self) -> None: self.assert_solve([], [], []) def test_simple_supertype_constraints(self) -> None: self.assert_solve([self.fx.t], [self.supc(self.fx.t, self.fx.a)], [self.fx.a]) self.assert_solve( [self.fx.t], [self.supc(self.fx.t, self.fx.a), self.supc(self.fx.t, self.fx.b)], [self.fx.a], ) def test_simple_subtype_constraints(self) -> None: self.assert_solve([self.fx.t], [self.subc(self.fx.t, self.fx.a)], [self.fx.a]) self.assert_solve( [self.fx.t], [self.subc(self.fx.t, self.fx.a), self.subc(self.fx.t, self.fx.b)], [self.fx.b], ) def test_both_kinds_of_constraints(self) -> None: self.assert_solve( [self.fx.t], [self.supc(self.fx.t, self.fx.b), self.subc(self.fx.t, self.fx.a)], [self.fx.b], ) def test_unsatisfiable_constraints(self) -> None: # The constraints are impossible to satisfy. self.assert_solve( [self.fx.t], [self.supc(self.fx.t, self.fx.a), self.subc(self.fx.t, self.fx.b)], [None] ) def test_exactly_specified_result(self) -> None: self.assert_solve( [self.fx.t], [self.supc(self.fx.t, self.fx.b), self.subc(self.fx.t, self.fx.b)], [self.fx.b], ) def test_multiple_variables(self) -> None: self.assert_solve( [self.fx.t, self.fx.s], [ self.supc(self.fx.t, self.fx.b), self.supc(self.fx.s, self.fx.c), self.subc(self.fx.t, self.fx.a), ], [self.fx.b, self.fx.c], ) def test_no_constraints_for_var(self) -> None: self.assert_solve([self.fx.t], [], [self.fx.uninhabited]) self.assert_solve([self.fx.t, self.fx.s], [], [self.fx.uninhabited, self.fx.uninhabited]) self.assert_solve( [self.fx.t, self.fx.s], [self.supc(self.fx.s, self.fx.a)], [self.fx.uninhabited, self.fx.a], ) def test_simple_constraints_with_dynamic_type(self) -> None: self.assert_solve([self.fx.t], [self.supc(self.fx.t, self.fx.anyt)], [self.fx.anyt]) self.assert_solve( [self.fx.t], [self.supc(self.fx.t, self.fx.anyt), self.supc(self.fx.t, self.fx.anyt)], [self.fx.anyt], ) self.assert_solve( [self.fx.t], [self.supc(self.fx.t, self.fx.anyt), self.supc(self.fx.t, self.fx.a)], [self.fx.anyt], ) self.assert_solve([self.fx.t], [self.subc(self.fx.t, self.fx.anyt)], [self.fx.anyt]) self.assert_solve( [self.fx.t], [self.subc(self.fx.t, self.fx.anyt), self.subc(self.fx.t, self.fx.anyt)], [self.fx.anyt], ) # self.assert_solve([self.fx.t], # [self.subc(self.fx.t, self.fx.anyt), # self.subc(self.fx.t, self.fx.a)], # [self.fx.anyt]) # TODO: figure out what this should be after changes to meet(any, X) def test_both_normal_and_any_types_in_results(self) -> None: # If one of the bounds is any, we promote the other bound to # any as well, since otherwise the type range does not make sense. 
self.assert_solve( [self.fx.t], [self.supc(self.fx.t, self.fx.a), self.subc(self.fx.t, self.fx.anyt)], [self.fx.anyt], ) self.assert_solve( [self.fx.t], [self.supc(self.fx.t, self.fx.anyt), self.subc(self.fx.t, self.fx.a)], [self.fx.anyt], ) def test_poly_no_constraints(self) -> None: self.assert_solve( [self.fx.t, self.fx.u], [], [self.fx.uninhabited, self.fx.uninhabited], allow_polymorphic=True, ) def test_poly_trivial_free(self) -> None: self.assert_solve( [self.fx.t, self.fx.u], [self.subc(self.fx.t, self.fx.a)], [self.fx.a, self.fx.u], [self.fx.u], allow_polymorphic=True, ) def test_poly_free_pair(self) -> None: self.assert_solve( [self.fx.t, self.fx.u], [self.subc(self.fx.t, self.fx.u)], [self.fx.t, self.fx.t], [self.fx.t], allow_polymorphic=True, ) def test_poly_free_pair_with_bounds(self) -> None: t_prime = self.fx.t.copy_modified(upper_bound=self.fx.b) self.assert_solve( [self.fx.t, self.fx.ub], [self.subc(self.fx.t, self.fx.ub)], [t_prime, t_prime], [t_prime], allow_polymorphic=True, ) def test_poly_free_pair_with_bounds_uninhabited(self) -> None: self.assert_solve( [self.fx.ub, self.fx.uc], [self.subc(self.fx.ub, self.fx.uc)], [self.fx.uninhabited, self.fx.uninhabited], [], allow_polymorphic=True, ) def test_poly_bounded_chain(self) -> None: # B <: T <: U <: S <: A self.assert_solve( [self.fx.t, self.fx.u, self.fx.s], [ self.supc(self.fx.t, self.fx.b), self.subc(self.fx.t, self.fx.u), self.subc(self.fx.u, self.fx.s), self.subc(self.fx.s, self.fx.a), ], [self.fx.b, self.fx.b, self.fx.b], allow_polymorphic=True, ) def test_poly_reverse_overlapping_chain(self) -> None: # A :> T <: S :> B self.assert_solve( [self.fx.t, self.fx.s], [ self.subc(self.fx.t, self.fx.s), self.subc(self.fx.t, self.fx.a), self.supc(self.fx.s, self.fx.b), ], [self.fx.a, self.fx.a], allow_polymorphic=True, ) def test_poly_reverse_split_chain(self) -> None: # B :> T <: S :> A self.assert_solve( [self.fx.t, self.fx.s], [ self.subc(self.fx.t, self.fx.s), self.subc(self.fx.t, self.fx.b), self.supc(self.fx.s, self.fx.a), ], [self.fx.b, self.fx.a], allow_polymorphic=True, ) def test_poly_unsolvable_chain(self) -> None: # A <: T <: U <: S <: B self.assert_solve( [self.fx.t, self.fx.u, self.fx.s], [ self.supc(self.fx.t, self.fx.a), self.subc(self.fx.t, self.fx.u), self.subc(self.fx.u, self.fx.s), self.subc(self.fx.s, self.fx.b), ], [None, None, None], allow_polymorphic=True, ) def test_simple_chain_closure(self) -> None: self.assert_transitive_closure( [self.fx.t.id, self.fx.s.id], [ self.supc(self.fx.t, self.fx.b), self.subc(self.fx.t, self.fx.s), self.subc(self.fx.s, self.fx.a), ], {(self.fx.t.id, self.fx.s.id)}, {self.fx.t.id: {self.fx.b}, self.fx.s.id: {self.fx.b}}, {self.fx.t.id: {self.fx.a}, self.fx.s.id: {self.fx.a}}, ) def test_reverse_chain_closure(self) -> None: self.assert_transitive_closure( [self.fx.t.id, self.fx.s.id], [ self.subc(self.fx.t, self.fx.s), self.subc(self.fx.t, self.fx.a), self.supc(self.fx.s, self.fx.b), ], {(self.fx.t.id, self.fx.s.id)}, {self.fx.t.id: set(), self.fx.s.id: {self.fx.b}}, {self.fx.t.id: {self.fx.a}, self.fx.s.id: set()}, ) def test_secondary_constraint_closure(self) -> None: self.assert_transitive_closure( [self.fx.t.id, self.fx.s.id], [self.supc(self.fx.s, self.fx.gt), self.subc(self.fx.s, self.fx.ga)], set(), {self.fx.t.id: set(), self.fx.s.id: {self.fx.gt}}, {self.fx.t.id: {self.fx.a}, self.fx.s.id: {self.fx.ga}}, ) def assert_solve( self, vars: list[TypeVarLikeType], constraints: list[Constraint], results: list[None | Type], free_vars: list[TypeVarLikeType] | None = 
None, allow_polymorphic: bool = False, ) -> None: if free_vars is None: free_vars = [] actual, actual_free = solve_constraints( vars, constraints, allow_polymorphic=allow_polymorphic ) assert_equal(actual, results) assert_equal(actual_free, free_vars) def assert_transitive_closure( self, vars: list[TypeVarId], constraints: list[Constraint], graph: Graph, lowers: Bounds, uppers: Bounds, ) -> None: actual_graph, actual_lowers, actual_uppers = transitive_closure(vars, constraints) # Add trivial elements. for v in vars: graph.add((v, v)) assert_equal(actual_graph, graph) assert_equal(dict(actual_lowers), lowers) assert_equal(dict(actual_uppers), uppers) def supc(self, type_var: TypeVarType, bound: Type) -> Constraint: return Constraint(type_var, SUPERTYPE_OF, bound) def subc(self, type_var: TypeVarType, bound: Type) -> Constraint: return Constraint(type_var, SUBTYPE_OF, bound)
algorandfoundation/puya
src/puyapy/_vendor/mypy/test/testsolve.py
Python
NOASSERTION
10,031
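The solver behaviour those assertions pin down can be pictured with a tiny standalone sketch. The linear lattice below (B <: A <: object, encoded by rank) is a hypothetical stand-in for TypeFixture's fx.b and fx.a, and solve_one is not mypy's solver -- just the same join-the-lower-bounds / meet-the-upper-bounds idea in miniature.

RANK = {"B": 0, "A": 1, "object": 2}
BY_RANK = {rank: name for name, rank in RANK.items()}

def solve_one(lowers: list[str], uppers: list[str]) -> str | None:
    lo = max((RANK[t] for t in lowers), default=None)  # join of lower bounds
    hi = min((RANK[t] for t in uppers), default=None)  # meet of upper bounds
    if lo is not None and hi is not None and lo > hi:
        return None  # empty range: unsatisfiable
    if lo is not None:
        return BY_RANK[lo]  # prefer the lower bound when one exists
    if hi is not None:
        return BY_RANK[hi]  # only upper bounds: take the meet
    return "<uninhabited>"  # no constraints at all

assert solve_one(["A", "B"], []) == "A"  # cf. test_simple_supertype_constraints
assert solve_one([], ["A", "B"]) == "B"  # cf. test_simple_subtype_constraints
assert solve_one(["A"], ["B"]) is None  # cf. test_unsatisfiable_constraints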
from __future__ import annotations import io import os.path import re import shutil import sys import tempfile import unittest from types import ModuleType from typing import Any import pytest from mypy.errors import CompileError from mypy.moduleinspect import InspectError, ModuleInspect from mypy.stubdoc import ( ArgSig, FunctionSig, build_signature, find_unique_signatures, infer_arg_sig_from_anon_docstring, infer_prop_type_from_docstring, infer_sig_from_docstring, is_valid_type, parse_all_signatures, parse_signature, ) from mypy.stubgen import ( Options, collect_build_targets, generate_stubs, is_blacklisted_path, is_non_library_module, mypy_options, parse_options, ) from mypy.stubgenc import InspectionStubGenerator, infer_c_method_args from mypy.stubutil import ( ClassInfo, common_dir_prefix, infer_method_ret_type, remove_misplaced_type_comments, walk_packages, ) from mypy.test.data import DataDrivenTestCase, DataSuite from mypy.test.helpers import assert_equal, assert_string_arrays_equal, local_sys_path_set class StubgenCmdLineSuite(unittest.TestCase): """Test cases for processing command-line options and finding files.""" @unittest.skipIf(sys.platform == "win32", "clean up fails on Windows") def test_files_found(self) -> None: current = os.getcwd() with tempfile.TemporaryDirectory() as tmp: try: os.chdir(tmp) os.mkdir("subdir") self.make_file("subdir", "a.py") self.make_file("subdir", "b.py") os.mkdir(os.path.join("subdir", "pack")) self.make_file("subdir", "pack", "__init__.py") opts = parse_options(["subdir"]) py_mods, pyi_mods, c_mods = collect_build_targets(opts, mypy_options(opts)) assert_equal(pyi_mods, []) assert_equal(c_mods, []) files = {mod.path for mod in py_mods} assert_equal( files, { os.path.join("subdir", "pack", "__init__.py"), os.path.join("subdir", "a.py"), os.path.join("subdir", "b.py"), }, ) finally: os.chdir(current) @unittest.skipIf(sys.platform == "win32", "clean up fails on Windows") def test_packages_found(self) -> None: current = os.getcwd() with tempfile.TemporaryDirectory() as tmp: try: os.chdir(tmp) os.mkdir("pack") self.make_file("pack", "__init__.py", content="from . 
import a, b") self.make_file("pack", "a.py") self.make_file("pack", "b.py") opts = parse_options(["-p", "pack"]) py_mods, pyi_mods, c_mods = collect_build_targets(opts, mypy_options(opts)) assert_equal(pyi_mods, []) assert_equal(c_mods, []) files = {os.path.relpath(mod.path or "FAIL") for mod in py_mods} assert_equal( files, { os.path.join("pack", "__init__.py"), os.path.join("pack", "a.py"), os.path.join("pack", "b.py"), }, ) finally: os.chdir(current) @unittest.skipIf(sys.platform == "win32", "clean up fails on Windows") def test_module_not_found(self) -> None: current = os.getcwd() captured_output = io.StringIO() sys.stdout = captured_output with tempfile.TemporaryDirectory() as tmp: try: os.chdir(tmp) self.make_file(tmp, "mymodule.py", content="import a") opts = parse_options(["-m", "mymodule"]) collect_build_targets(opts, mypy_options(opts)) assert captured_output.getvalue() == "" finally: sys.stdout = sys.__stdout__ os.chdir(current) def make_file(self, *path: str, content: str = "") -> None: file = os.path.join(*path) with open(file, "w") as f: f.write(content) def run(self, result: Any | None = None) -> Any | None: with local_sys_path_set(): return super().run(result) class StubgenCliParseSuite(unittest.TestCase): def test_walk_packages(self) -> None: with ModuleInspect() as m: assert_equal(set(walk_packages(m, ["mypy.errors"])), {"mypy.errors"}) assert_equal( set(walk_packages(m, ["mypy.errors", "mypy.stubgen"])), {"mypy.errors", "mypy.stubgen"}, ) all_mypy_packages = set(walk_packages(m, ["mypy"])) self.assertTrue( all_mypy_packages.issuperset( {"mypy", "mypy.errors", "mypy.stubgen", "mypy.test", "mypy.test.helpers"} ) ) class StubgenUtilSuite(unittest.TestCase): """Unit tests for stubgen utility functions.""" def test_parse_signature(self) -> None: self.assert_parse_signature("func()", ("func", [], [])) def test_parse_signature_with_args(self) -> None: self.assert_parse_signature("func(arg)", ("func", ["arg"], [])) self.assert_parse_signature("do(arg, arg2)", ("do", ["arg", "arg2"], [])) def test_parse_signature_with_optional_args(self) -> None: self.assert_parse_signature("func([arg])", ("func", [], ["arg"])) self.assert_parse_signature("func(arg[, arg2])", ("func", ["arg"], ["arg2"])) self.assert_parse_signature("func([arg[, arg2]])", ("func", [], ["arg", "arg2"])) def test_parse_signature_with_default_arg(self) -> None: self.assert_parse_signature("func(arg=None)", ("func", [], ["arg"])) self.assert_parse_signature("func(arg, arg2=None)", ("func", ["arg"], ["arg2"])) self.assert_parse_signature('func(arg=1, arg2="")', ("func", [], ["arg", "arg2"])) def test_parse_signature_with_qualified_function(self) -> None: self.assert_parse_signature("ClassName.func(arg)", ("func", ["arg"], [])) def test_parse_signature_with_kw_only_arg(self) -> None: self.assert_parse_signature( "ClassName.func(arg, *, arg2=1)", ("func", ["arg", "*"], ["arg2"]) ) def test_parse_signature_with_star_arg(self) -> None: self.assert_parse_signature("ClassName.func(arg, *args)", ("func", ["arg", "*args"], [])) def test_parse_signature_with_star_star_arg(self) -> None: self.assert_parse_signature("ClassName.func(arg, **args)", ("func", ["arg", "**args"], [])) def assert_parse_signature(self, sig: str, result: tuple[str, list[str], list[str]]) -> None: assert_equal(parse_signature(sig), result) def test_build_signature(self) -> None: assert_equal(build_signature([], []), "()") assert_equal(build_signature(["arg"], []), "(arg)") assert_equal(build_signature(["arg", "arg2"], []), "(arg, arg2)") 
assert_equal(build_signature(["arg"], ["arg2"]), "(arg, arg2=...)") assert_equal(build_signature(["arg"], ["arg2", "**x"]), "(arg, arg2=..., **x)") def test_parse_all_signatures(self) -> None: assert_equal( parse_all_signatures( [ "random text", ".. function:: fn(arg", ".. function:: fn()", " .. method:: fn2(arg)", ] ), ([("fn", "()"), ("fn2", "(arg)")], []), ) def test_find_unique_signatures(self) -> None: assert_equal( find_unique_signatures( [ ("func", "()"), ("func", "()"), ("func2", "()"), ("func2", "(arg)"), ("func3", "(arg, arg2)"), ] ), [("func", "()"), ("func3", "(arg, arg2)")], ) def test_infer_sig_from_docstring(self) -> None: assert_equal( infer_sig_from_docstring("\nfunc(x) - y", "func"), [FunctionSig(name="func", args=[ArgSig(name="x")], ret_type="Any")], ) assert_equal( infer_sig_from_docstring("\nfunc(x)", "func"), [FunctionSig(name="func", args=[ArgSig(name="x")], ret_type="Any")], ) assert_equal( infer_sig_from_docstring("\nfunc(x, Y_a=None)", "func"), [ FunctionSig( name="func", args=[ArgSig(name="x"), ArgSig(name="Y_a", default=True)], ret_type="Any", ) ], ) assert_equal( infer_sig_from_docstring("\nfunc(x, Y_a=3)", "func"), [ FunctionSig( name="func", args=[ArgSig(name="x"), ArgSig(name="Y_a", default=True)], ret_type="Any", ) ], ) assert_equal( infer_sig_from_docstring("\nfunc(x, Y_a=[1, 2, 3])", "func"), [ FunctionSig( name="func", args=[ArgSig(name="x"), ArgSig(name="Y_a", default=True)], ret_type="Any", ) ], ) assert_equal(infer_sig_from_docstring("\nafunc(x) - y", "func"), []) assert_equal(infer_sig_from_docstring("\nfunc(x, y", "func"), []) assert_equal( infer_sig_from_docstring("\nfunc(x=z(y))", "func"), [FunctionSig(name="func", args=[ArgSig(name="x", default=True)], ret_type="Any")], ) assert_equal(infer_sig_from_docstring("\nfunc x", "func"), []) # Try to infer signature from type annotation. 
assert_equal( infer_sig_from_docstring("\nfunc(x: int)", "func"), [FunctionSig(name="func", args=[ArgSig(name="x", type="int")], ret_type="Any")], ) assert_equal( infer_sig_from_docstring("\nfunc(x: int=3)", "func"), [ FunctionSig( name="func", args=[ArgSig(name="x", type="int", default=True)], ret_type="Any" ) ], ) assert_equal( infer_sig_from_docstring("\nfunc(x=3)", "func"), [ FunctionSig( name="func", args=[ArgSig(name="x", type=None, default=True)], ret_type="Any" ) ], ) assert_equal( infer_sig_from_docstring("\nfunc() -> int", "func"), [FunctionSig(name="func", args=[], ret_type="int")], ) assert_equal( infer_sig_from_docstring("\nfunc(x: int=3) -> int", "func"), [ FunctionSig( name="func", args=[ArgSig(name="x", type="int", default=True)], ret_type="int" ) ], ) assert_equal( infer_sig_from_docstring("\nfunc(x: int=3) -> int \n", "func"), [ FunctionSig( name="func", args=[ArgSig(name="x", type="int", default=True)], ret_type="int" ) ], ) assert_equal( infer_sig_from_docstring("\nfunc(x: Tuple[int, str]) -> str", "func"), [ FunctionSig( name="func", args=[ArgSig(name="x", type="Tuple[int,str]")], ret_type="str" ) ], ) assert_equal( infer_sig_from_docstring( "\nfunc(x: Tuple[int, Tuple[str, int], str], y: int) -> str", "func" ), [ FunctionSig( name="func", args=[ ArgSig(name="x", type="Tuple[int,Tuple[str,int],str]"), ArgSig(name="y", type="int"), ], ret_type="str", ) ], ) assert_equal( infer_sig_from_docstring("\nfunc(x: foo.bar)", "func"), [FunctionSig(name="func", args=[ArgSig(name="x", type="foo.bar")], ret_type="Any")], ) assert_equal( infer_sig_from_docstring("\nfunc(x: list=[1,2,[3,4]])", "func"), [ FunctionSig( name="func", args=[ArgSig(name="x", type="list", default=True)], ret_type="Any" ) ], ) assert_equal( infer_sig_from_docstring('\nfunc(x: str="nasty[")', "func"), [ FunctionSig( name="func", args=[ArgSig(name="x", type="str", default=True)], ret_type="Any" ) ], ) assert_equal(infer_sig_from_docstring("\nfunc[(x: foo.bar, invalid]", "func"), []) assert_equal( infer_sig_from_docstring("\nfunc(x: invalid::type<with_template>)", "func"), [FunctionSig(name="func", args=[ArgSig(name="x", type=None)], ret_type="Any")], ) assert_equal( infer_sig_from_docstring('\nfunc(x: str="")', "func"), [ FunctionSig( name="func", args=[ArgSig(name="x", type="str", default=True)], ret_type="Any" ) ], ) def test_infer_sig_from_docstring_duplicate_args(self) -> None: assert_equal( infer_sig_from_docstring("\nfunc(x, x) -> str\nfunc(x, y) -> int", "func"), [FunctionSig(name="func", args=[ArgSig(name="x"), ArgSig(name="y")], ret_type="int")], ) def test_infer_sig_from_docstring_bad_indentation(self) -> None: assert_equal( infer_sig_from_docstring( """ x x x """, "func", ), None, ) def test_infer_arg_sig_from_anon_docstring(self) -> None: assert_equal( infer_arg_sig_from_anon_docstring("(*args, **kwargs)"), [ArgSig(name="*args"), ArgSig(name="**kwargs")], ) assert_equal( infer_arg_sig_from_anon_docstring( "(x: Tuple[int, Tuple[str, int], str]=(1, ('a', 2), 'y'), y: int=4)" ), [ ArgSig(name="x", type="Tuple[int,Tuple[str,int],str]", default=True), ArgSig(name="y", type="int", default=True), ], ) def test_infer_prop_type_from_docstring(self) -> None: assert_equal(infer_prop_type_from_docstring("str: A string."), "str") assert_equal(infer_prop_type_from_docstring("Optional[int]: An int."), "Optional[int]") assert_equal( infer_prop_type_from_docstring("Tuple[int, int]: A tuple."), "Tuple[int, int]" ) assert_equal(infer_prop_type_from_docstring("\nstr: A string."), None) def 
test_infer_sig_from_docstring_square_brackets(self) -> None: assert ( infer_sig_from_docstring("fetch_row([maxrows, how]) -- Fetches stuff", "fetch_row") == [] ) def test_remove_misplaced_type_comments_1(self) -> None: good = """ \u1234 def f(x): # type: (int) -> int def g(x): # type: (int) -> int def h(): # type: () int x = 1 # type: int """ assert_equal(remove_misplaced_type_comments(good), good) def test_remove_misplaced_type_comments_2(self) -> None: bad = """ def f(x): # type: Callable[[int], int] pass # type: "foo" # type: 'bar' x = 1 # type: int """ bad_fixed = """ def f(x): pass x = 1 """ assert_equal(remove_misplaced_type_comments(bad), bad_fixed) def test_remove_misplaced_type_comments_3(self) -> None: bad = ''' def f(x): """docstring""" # type: (int) -> int pass def g(x): """docstring """ # type: (int) -> int pass ''' bad_fixed = ''' def f(x): """docstring""" pass def g(x): """docstring """ pass ''' assert_equal(remove_misplaced_type_comments(bad), bad_fixed) def test_remove_misplaced_type_comments_4(self) -> None: bad = """ def f(x): '''docstring''' # type: (int) -> int pass def g(x): '''docstring ''' # type: (int) -> int pass """ bad_fixed = """ def f(x): '''docstring''' pass def g(x): '''docstring ''' pass """ assert_equal(remove_misplaced_type_comments(bad), bad_fixed) def test_remove_misplaced_type_comments_5(self) -> None: bad = """ def f(x): # type: (int, List[Any], # float, bool) -> int pass def g(x): # type: (int, List[Any]) pass """ bad_fixed = """ def f(x): # float, bool) -> int pass def g(x): pass """ assert_equal(remove_misplaced_type_comments(bad), bad_fixed) def test_remove_misplaced_type_comments_bytes(self) -> None: original = b""" \xbf def f(x): # type: (int) -> int def g(x): # type: (int) -> int pass def h(): # type: int pass x = 1 # type: int """ dest = b""" \xbf def f(x): # type: (int) -> int def g(x): # type: (int) -> int pass def h(): pass x = 1 # type: int """ assert_equal(remove_misplaced_type_comments(original), dest) @unittest.skipIf(sys.platform == "win32", "Tests building the paths common ancestor on *nix") def test_common_dir_prefix_unix(self) -> None: assert common_dir_prefix([]) == "." assert common_dir_prefix(["x.pyi"]) == "." assert common_dir_prefix(["./x.pyi"]) == "." assert common_dir_prefix(["foo/bar/x.pyi"]) == "foo/bar" assert common_dir_prefix(["foo/bar/x.pyi", "foo/bar/y.pyi"]) == "foo/bar" assert common_dir_prefix(["foo/bar/x.pyi", "foo/y.pyi"]) == "foo" assert common_dir_prefix(["foo/x.pyi", "foo/bar/y.pyi"]) == "foo" assert common_dir_prefix(["foo/bar/zar/x.pyi", "foo/y.pyi"]) == "foo" assert common_dir_prefix(["foo/x.pyi", "foo/bar/zar/y.pyi"]) == "foo" assert common_dir_prefix(["foo/bar/zar/x.pyi", "foo/bar/y.pyi"]) == "foo/bar" assert common_dir_prefix(["foo/bar/x.pyi", "foo/bar/zar/y.pyi"]) == "foo/bar" assert common_dir_prefix([r"foo/bar\x.pyi"]) == "foo" assert common_dir_prefix([r"foo\bar/x.pyi"]) == r"foo\bar" @unittest.skipIf( sys.platform != "win32", "Tests building the paths common ancestor on Windows" ) def test_common_dir_prefix_win(self) -> None: assert common_dir_prefix(["x.pyi"]) == "." assert common_dir_prefix([r".\x.pyi"]) == "." 
assert common_dir_prefix([r"foo\bar\x.pyi"]) == r"foo\bar" assert common_dir_prefix([r"foo\bar\x.pyi", r"foo\bar\y.pyi"]) == r"foo\bar" assert common_dir_prefix([r"foo\bar\x.pyi", r"foo\y.pyi"]) == "foo" assert common_dir_prefix([r"foo\x.pyi", r"foo\bar\y.pyi"]) == "foo" assert common_dir_prefix([r"foo\bar\zar\x.pyi", r"foo\y.pyi"]) == "foo" assert common_dir_prefix([r"foo\x.pyi", r"foo\bar\zar\y.pyi"]) == "foo" assert common_dir_prefix([r"foo\bar\zar\x.pyi", r"foo\bar\y.pyi"]) == r"foo\bar" assert common_dir_prefix([r"foo\bar\x.pyi", r"foo\bar\zar\y.pyi"]) == r"foo\bar" assert common_dir_prefix([r"foo/bar\x.pyi"]) == r"foo\bar" assert common_dir_prefix([r"foo\bar/x.pyi"]) == r"foo\bar" assert common_dir_prefix([r"foo/bar/x.pyi"]) == r"foo\bar" class StubgenHelpersSuite(unittest.TestCase): def test_is_blacklisted_path(self) -> None: assert not is_blacklisted_path("foo/bar.py") assert not is_blacklisted_path("foo.py") assert not is_blacklisted_path("foo/xvendor/bar.py") assert not is_blacklisted_path("foo/vendorx/bar.py") assert is_blacklisted_path("foo/vendor/bar.py") assert is_blacklisted_path("foo/vendored/bar.py") assert is_blacklisted_path("foo/vendored/bar/thing.py") assert is_blacklisted_path("foo/six.py") def test_is_non_library_module(self) -> None: assert not is_non_library_module("foo") assert not is_non_library_module("foo.bar") # The following could be test modules, but we are very conservative and # don't treat them as such since they could plausibly be real modules. assert not is_non_library_module("foo.bartest") assert not is_non_library_module("foo.bartests") assert not is_non_library_module("foo.testbar") assert is_non_library_module("foo.test") assert is_non_library_module("foo.test.foo") assert is_non_library_module("foo.tests") assert is_non_library_module("foo.tests.foo") assert is_non_library_module("foo.testing.foo") assert is_non_library_module("foo.SelfTest.foo") assert is_non_library_module("foo.test_bar") assert is_non_library_module("foo.bar_tests") assert is_non_library_module("foo.testing") assert is_non_library_module("foo.conftest") assert is_non_library_module("foo.bar_test_util") assert is_non_library_module("foo.bar_test_utils") assert is_non_library_module("foo.bar_test_base") assert is_non_library_module("foo.setup") assert is_non_library_module("foo.__main__") class StubgenPythonSuite(DataSuite): """Data-driven end-to-end test cases that generate stub files. You can use these magic test case name suffixes: *_semanal Run semantic analysis (slow as this uses real stubs -- only use when necessary) *_import Import module and perform runtime introspection (in the current process!) You can use these magic comments: # flags: --some-stubgen-option ... Specify custom stubgen options # modules: module1 module2 ... Specify which modules to output (by default only 'main') """ required_out_section = True base_path = "." files = ["stubgen.test"] @unittest.skipIf(sys.platform == "win32", "clean up fails on Windows") def run_case(self, testcase: DataDrivenTestCase) -> None: with local_sys_path_set(): self.run_case_inner(testcase) def run_case_inner(self, testcase: DataDrivenTestCase) -> None: extra = [] # Extra command-line args mods = [] # Module names to process source = "\n".join(testcase.input) for file, content in testcase.files + [("./main.py", source)]: # Strip ./ prefix and .py suffix. 
mod = file[2:-3].replace("/", ".") if mod.endswith(".__init__"): mod, _, _ = mod.rpartition(".") mods.append(mod) if "-p " not in source: extra.extend(["-m", mod]) with open(file, "w") as f: f.write(content) options = self.parse_flags(source, extra) if sys.version_info < options.pyversion: pytest.skip() modules = self.parse_modules(source) out_dir = "out" try: try: if testcase.name.endswith("_inspect"): options.inspect = True else: if not testcase.name.endswith("_import"): options.no_import = True if not testcase.name.endswith("_semanal"): options.parse_only = True generate_stubs(options) a: list[str] = [] for module in modules: fnam = module_to_path(out_dir, module) self.add_file(fnam, a, header=len(modules) > 1) except CompileError as e: a = e.messages assert_string_arrays_equal( testcase.output, a, f"Invalid output ({testcase.file}, line {testcase.line})" ) finally: for mod in mods: if mod in sys.modules: del sys.modules[mod] shutil.rmtree(out_dir) def parse_flags(self, program_text: str, extra: list[str]) -> Options: flags = re.search("# flags: (.*)$", program_text, flags=re.MULTILINE) pyversion = None if flags: flag_list = flags.group(1).split() for i, flag in enumerate(flag_list): if flag.startswith("--python-version="): pyversion = flag.split("=", 1)[1] del flag_list[i] break else: flag_list = [] options = parse_options(flag_list + extra) if pyversion: # A hack to allow testing old python versions with new language constructs # This should be rarely used in general as stubgen output should not be version-specific major, minor = pyversion.split(".", 1) options.pyversion = (int(major), int(minor)) if "--verbose" not in flag_list: options.quiet = True else: options.verbose = True return options def parse_modules(self, program_text: str) -> list[str]: modules = re.search("# modules: (.*)$", program_text, flags=re.MULTILINE) if modules: return modules.group(1).split() else: return ["main"] def add_file(self, path: str, result: list[str], header: bool) -> None: if not os.path.exists(path): result.append("<%s was not generated>" % path.replace("\\", "/")) return if header: result.append(f"# {path[4:]}") with open(path, encoding="utf8") as file: result.extend(file.read().splitlines()) self_arg = ArgSig(name="self") class TestBaseClass: pass class TestClass(TestBaseClass): pass class StubgencSuite(unittest.TestCase): """Unit tests for stub generation from C modules using introspection. Note that these don't cover a lot! 
""" def test_infer_hash_sig(self) -> None: assert_equal(infer_c_method_args("__hash__"), [self_arg]) assert_equal(infer_method_ret_type("__hash__"), "int") def test_infer_getitem_sig(self) -> None: assert_equal(infer_c_method_args("__getitem__"), [self_arg, ArgSig(name="index")]) def test_infer_setitem_sig(self) -> None: assert_equal( infer_c_method_args("__setitem__"), [self_arg, ArgSig(name="index"), ArgSig(name="object")], ) assert_equal(infer_method_ret_type("__setitem__"), "None") def test_infer_eq_op_sig(self) -> None: for op in ("eq", "ne", "lt", "le", "gt", "ge"): assert_equal( infer_c_method_args(f"__{op}__"), [self_arg, ArgSig(name="other", type="object")] ) def test_infer_binary_op_sig(self) -> None: for op in ("add", "radd", "sub", "rsub", "mul", "rmul"): assert_equal(infer_c_method_args(f"__{op}__"), [self_arg, ArgSig(name="other")]) def test_infer_equality_op_sig(self) -> None: for op in ("eq", "ne", "lt", "le", "gt", "ge", "contains"): assert_equal(infer_method_ret_type(f"__{op}__"), "bool") def test_infer_unary_op_sig(self) -> None: for op in ("neg", "pos"): assert_equal(infer_c_method_args(f"__{op}__"), [self_arg]) def test_infer_cast_sig(self) -> None: for op in ("float", "bool", "bytes", "int"): assert_equal(infer_method_ret_type(f"__{op}__"), op) def test_generate_class_stub_no_crash_for_object(self) -> None: output: list[str] = [] mod = ModuleType("module", "") # any module is fine gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) gen.generate_class_stub("alias", object, output) assert_equal(gen.get_imports().splitlines(), []) assert_equal(output[0], "class alias:") def test_generate_class_stub_variable_type_annotation(self) -> None: # This class mimics the stubgen unit test 'testClassVariable' class TestClassVariableCls: x = 1 output: list[str] = [] mod = ModuleType("module", "") # any module is fine gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) gen.generate_class_stub("C", TestClassVariableCls, output) assert_equal(gen.get_imports().splitlines(), ["from typing import ClassVar"]) assert_equal(output, ["class C:", " x: ClassVar[int] = ..."]) def test_non_c_generate_signature_with_kw_only_args(self) -> None: class TestClass: def test( self, arg0: str, *, keyword_only: str, keyword_only_with_default: int = 7 ) -> None: pass output: list[str] = [] mod = ModuleType(TestClass.__module__, "") gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) gen.is_c_module = False gen.generate_function_stub( "test", TestClass.test, output=output, class_info=ClassInfo( self_var="self", cls=TestClass, name="TestClass", docstring=getattr(TestClass, "__doc__", None), ), ) assert_equal( output, [ "def test(self, arg0: str, *, keyword_only: str, keyword_only_with_default: int = ...) -> None: ..." 
            ],
        )

    def test_generate_c_type_inheritance(self) -> None:
        class TestClass(KeyError):
            pass

        output: list[str] = []
        mod = ModuleType("module", "")
        gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod)
        gen.generate_class_stub("C", TestClass, output)
        assert_equal(output, ["class C(KeyError): ..."])
        assert_equal(gen.get_imports().splitlines(), [])

    def test_generate_c_type_inheritance_same_module(self) -> None:
        output: list[str] = []
        mod = ModuleType(TestBaseClass.__module__, "")
        gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod)
        gen.generate_class_stub("C", TestClass, output)
        assert_equal(output, ["class C(TestBaseClass): ..."])
        assert_equal(gen.get_imports().splitlines(), [])

    def test_generate_c_type_inheritance_other_module(self) -> None:
        import argparse

        class TestClass(argparse.Action):
            pass

        output: list[str] = []
        mod = ModuleType("module", "")
        gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod)
        gen.generate_class_stub("C", TestClass, output)
        assert_equal(output, ["class C(argparse.Action): ..."])
        assert_equal(gen.get_imports().splitlines(), ["import argparse"])

    def test_generate_c_type_inheritance_builtin_type(self) -> None:
        class TestClass(type):
            pass

        output: list[str] = []
        mod = ModuleType("module", "")
        gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod)
        gen.generate_class_stub("C", TestClass, output)
        assert_equal(output, ["class C(type): ..."])
        assert_equal(gen.get_imports().splitlines(), [])

    def test_generate_c_type_with_docstring(self) -> None:
        class TestClass:
            def test(self, arg0: str) -> None:
                """
                test(self: TestClass, arg0: int)
                """

        output: list[str] = []
        mod = ModuleType(TestClass.__module__, "")
        gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod)
        gen.generate_function_stub(
            "test",
            TestClass.test,
            output=output,
            class_info=ClassInfo(self_var="self", cls=TestClass, name="TestClass"),
        )
        assert_equal(output, ["def test(self, arg0: int) -> Any: ..."])
        assert_equal(gen.get_imports().splitlines(), [])

    def test_generate_c_type_with_docstring_no_self_arg(self) -> None:
        class TestClass:
            def test(self, arg0: str) -> None:
                """
                test(arg0: int)
                """

        output: list[str] = []
        mod = ModuleType(TestClass.__module__, "")
        gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod)
        gen.generate_function_stub(
            "test",
            TestClass.test,
            output=output,
            class_info=ClassInfo(self_var="self", cls=TestClass, name="TestClass"),
        )
        assert_equal(output, ["def test(self, arg0: int) -> Any: ..."])
        assert_equal(gen.get_imports().splitlines(), [])

    def test_generate_c_type_classmethod(self) -> None:
        class TestClass:
            @classmethod
            def test(cls, arg0: str) -> None:
                pass

        output: list[str] = []
        mod = ModuleType(TestClass.__module__, "")
        gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod)
        gen.generate_function_stub(
            "test",
            TestClass.test,
            output=output,
            class_info=ClassInfo(self_var="cls", cls=TestClass, name="TestClass"),
        )
        assert_equal(output, ["@classmethod", "def test(cls, *args, **kwargs): ..."])
        assert_equal(gen.get_imports().splitlines(), [])

    def test_generate_c_type_classmethod_with_overloads(self) -> None:
        class TestClass:
            @classmethod
            def test(self, arg0: str) -> None:
                """
                test(cls, arg0: str)
                test(cls, arg0: int)
                """
                pass

        output: list[str] = []
        mod = ModuleType(TestClass.__module__, "")
        gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod)
        gen.generate_function_stub(
            "test",
            TestClass.test,
            output=output,
            class_info=ClassInfo(self_var="cls", cls=TestClass, name="TestClass"),
        )
        assert_equal(
            output,
            [
                "@overload",
                "@classmethod",
                "def test(cls, arg0: str) -> Any: ...",
                "@overload",
                "@classmethod",
                "def test(cls, arg0: int) -> Any: ...",
            ],
        )
        assert_equal(gen.get_imports().splitlines(), ["from typing import overload"])

    def test_generate_c_type_with_docstring_empty_default(self) -> None:
        class TestClass:
            def test(self, arg0: str = "") -> None:
                """
                test(self: TestClass, arg0: str = "")
                """

        output: list[str] = []
        mod = ModuleType(TestClass.__module__, "")
        gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod)
        gen.generate_function_stub(
            "test",
            TestClass.test,
            output=output,
            class_info=ClassInfo(self_var="self", cls=TestClass, name="TestClass"),
        )
        assert_equal(output, ["def test(self, arg0: str = ...) -> Any: ..."])
        assert_equal(gen.get_imports().splitlines(), [])

    def test_generate_c_function_other_module_arg(self) -> None:
        """Test that if argument references type from other module, module will be imported."""

        # Provide different type in python spec than in docstring to make sure that docstring
        # information is used.
        def test(arg0: str) -> None:
            """
            test(arg0: argparse.Action)
            """

        output: list[str] = []
        mod = ModuleType(self.__module__, "")
        gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod)
        gen.generate_function_stub("test", test, output=output)
        assert_equal(output, ["def test(arg0: argparse.Action) -> Any: ..."])
        assert_equal(gen.get_imports().splitlines(), ["import argparse"])

    def test_generate_c_function_same_module(self) -> None:
        """Test that if annotation references type from same module but using full path, no
        module will be imported, and type specification will be stripped to local reference.
        """

        # Provide different type in python spec than in docstring to make sure that docstring
        # information is used.
        def test(arg0: str) -> None:
            """
            test(arg0: argparse.Action) -> argparse.Action
            """

        output: list[str] = []
        mod = ModuleType("argparse", "")
        gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod)
        gen.generate_function_stub("test", test, output=output)
        assert_equal(output, ["def test(arg0: Action) -> Action: ..."])
        assert_equal(gen.get_imports().splitlines(), [])

    def test_generate_c_function_other_module(self) -> None:
        """Test that if annotation references type from other module, module will be imported."""

        def test(arg0: str) -> None:
            """
            test(arg0: argparse.Action) -> argparse.Action
            """

        output: list[str] = []
        mod = ModuleType(self.__module__, "")
        gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod)
        gen.generate_function_stub("test", test, output=output)
        assert_equal(output, ["def test(arg0: argparse.Action) -> argparse.Action: ..."])
        assert_equal(gen.get_imports().splitlines(), ["import argparse"])

    def test_generate_c_function_same_module_nested(self) -> None:
        """Test that if annotation references type from same module but using full path, no
        module will be imported, and type specification will be stripped to local reference.
        """

        # Provide different type in python spec than in docstring to make sure that docstring
        # information is used.
        def test(arg0: str) -> None:
            """
            test(arg0: list[argparse.Action]) -> list[argparse.Action]
            """

        output: list[str] = []
        mod = ModuleType("argparse", "")
        gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod)
        gen.generate_function_stub("test", test, output=output)
        assert_equal(output, ["def test(arg0: list[Action]) -> list[Action]: ..."])
        assert_equal(gen.get_imports().splitlines(), [])

    def test_generate_c_function_same_module_compound(self) -> None:
        """Test that if annotation references type from same module but using full path, no
        module will be imported, and type specification will be stripped to local reference.
        """

        # Provide different type in python spec than in docstring to make sure that docstring
        # information is used.
        def test(arg0: str) -> None:
            """
            test(arg0: Union[argparse.Action, NoneType]) -> Tuple[argparse.Action, NoneType]
            """

        output: list[str] = []
        mod = ModuleType("argparse", "")
        gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod)
        gen.generate_function_stub("test", test, output=output)
        assert_equal(output, ["def test(arg0: Union[Action, None]) -> Tuple[Action, None]: ..."])
        assert_equal(gen.get_imports().splitlines(), [])

    def test_generate_c_function_other_module_nested(self) -> None:
        """Test that if annotation references type from other module, module will be imported,
        and the import will be restricted to one of the known modules."""

        def test(arg0: str) -> None:
            """
            test(arg0: foo.bar.Action) -> other.Thing
            """

        output: list[str] = []
        mod = ModuleType(self.__module__, "")
        gen = InspectionStubGenerator(
            mod.__name__, known_modules=["foo", "foo.spangle", "bar"], module=mod
        )
        gen.generate_function_stub("test", test, output=output)
        assert_equal(output, ["def test(arg0: foo.bar.Action) -> other.Thing: ..."])
        assert_equal(gen.get_imports().splitlines(), ["import foo", "import other"])

    def test_generate_c_function_no_crash_for_non_str_docstring(self) -> None:
        def test(arg0: str) -> None: ...
test.__doc__ = property(lambda self: "test(arg0: str) -> None") # type: ignore[assignment] output: list[str] = [] mod = ModuleType(self.__module__, "") gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) gen.generate_function_stub("test", test, output=output) assert_equal(output, ["def test(*args, **kwargs): ..."]) assert_equal(gen.get_imports().splitlines(), []) def test_generate_c_property_with_pybind11(self) -> None: """Signatures included by PyBind11 inside property.fget are read.""" class TestClass: def get_attribute(self) -> None: """ (self: TestClass) -> str """ attribute = property(get_attribute, doc="") readwrite_properties: list[str] = [] readonly_properties: list[str] = [] mod = ModuleType("module", "") # any module is fine gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) gen.generate_property_stub( "attribute", TestClass.__dict__["attribute"], TestClass.attribute, [], readwrite_properties, readonly_properties, ) assert_equal(readwrite_properties, []) assert_equal(readonly_properties, ["@property", "def attribute(self) -> str: ..."]) def test_generate_c_property_with_rw_property(self) -> None: class TestClass: def __init__(self) -> None: self._attribute = 0 @property def attribute(self) -> int: return self._attribute @attribute.setter def attribute(self, value: int) -> None: self._attribute = value readwrite_properties: list[str] = [] readonly_properties: list[str] = [] mod = ModuleType("module", "") # any module is fine gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) gen.generate_property_stub( "attribute", TestClass.__dict__["attribute"], TestClass.attribute, [], readwrite_properties, readonly_properties, ) assert_equal(readwrite_properties, ["attribute: Incomplete"]) assert_equal(readonly_properties, []) def test_generate_c_type_with_single_arg_generic(self) -> None: class TestClass: def test(self, arg0: str) -> None: """ test(self: TestClass, arg0: List[int]) """ output: list[str] = [] mod = ModuleType(TestClass.__module__, "") gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) gen.generate_function_stub( "test", TestClass.test, output=output, class_info=ClassInfo(self_var="self", cls=TestClass, name="TestClass"), ) assert_equal(output, ["def test(self, arg0: List[int]) -> Any: ..."]) assert_equal(gen.get_imports().splitlines(), []) def test_generate_c_type_with_double_arg_generic(self) -> None: class TestClass: def test(self, arg0: str) -> None: """ test(self: TestClass, arg0: Dict[str, int]) """ output: list[str] = [] mod = ModuleType(TestClass.__module__, "") gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) gen.generate_function_stub( "test", TestClass.test, output=output, class_info=ClassInfo(self_var="self", cls=TestClass, name="TestClass"), ) assert_equal(output, ["def test(self, arg0: Dict[str, int]) -> Any: ..."]) assert_equal(gen.get_imports().splitlines(), []) def test_generate_c_type_with_nested_generic(self) -> None: class TestClass: def test(self, arg0: str) -> None: """ test(self: TestClass, arg0: Dict[str, List[int]]) """ output: list[str] = [] mod = ModuleType(TestClass.__module__, "") gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) gen.generate_function_stub( "test", TestClass.test, output=output, class_info=ClassInfo(self_var="self", cls=TestClass, name="TestClass"), ) assert_equal(output, ["def test(self, arg0: Dict[str, List[int]]) -> Any: 
..."]) assert_equal(gen.get_imports().splitlines(), []) def test_generate_c_type_with_generic_using_other_module_first(self) -> None: class TestClass: def test(self, arg0: str) -> None: """ test(self: TestClass, arg0: Dict[argparse.Action, int]) """ output: list[str] = [] mod = ModuleType(TestClass.__module__, "") gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) gen.generate_function_stub( "test", TestClass.test, output=output, class_info=ClassInfo(self_var="self", cls=TestClass, name="TestClass"), ) assert_equal(output, ["def test(self, arg0: Dict[argparse.Action, int]) -> Any: ..."]) assert_equal(gen.get_imports().splitlines(), ["import argparse"]) def test_generate_c_type_with_generic_using_other_module_last(self) -> None: class TestClass: def test(self, arg0: str) -> None: """ test(self: TestClass, arg0: Dict[str, argparse.Action]) """ output: list[str] = [] mod = ModuleType(TestClass.__module__, "") gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) gen.generate_function_stub( "test", TestClass.test, output=output, class_info=ClassInfo(self_var="self", cls=TestClass, name="TestClass"), ) assert_equal(output, ["def test(self, arg0: Dict[str, argparse.Action]) -> Any: ..."]) assert_equal(gen.get_imports().splitlines(), ["import argparse"]) def test_generate_c_type_with_overload_pybind11(self) -> None: class TestClass: def __init__(self, arg0: str) -> None: """ __init__(*args, **kwargs) Overloaded function. 1. __init__(self: TestClass, arg0: str) -> None 2. __init__(self: TestClass, arg0: str, arg1: str) -> None """ output: list[str] = [] mod = ModuleType(TestClass.__module__, "") gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) gen.generate_function_stub( "__init__", TestClass.__init__, output=output, class_info=ClassInfo(self_var="self", cls=TestClass, name="TestClass"), ) assert_equal( output, [ "@overload", "def __init__(self, arg0: str) -> None: ...", "@overload", "def __init__(self, arg0: str, arg1: str) -> None: ...", "@overload", "def __init__(self, *args, **kwargs) -> Any: ...", ], ) assert_equal(gen.get_imports().splitlines(), ["from typing import overload"]) def test_generate_c_type_with_overload_shiboken(self) -> None: class TestClass: """ TestClass(self: TestClass, arg0: str) -> None TestClass(self: TestClass, arg0: str, arg1: str) -> None """ def __init__(self, arg0: str) -> None: pass output: list[str] = [] mod = ModuleType(TestClass.__module__, "") gen = InspectionStubGenerator(mod.__name__, known_modules=[mod.__name__], module=mod) gen.generate_function_stub( "__init__", TestClass.__init__, output=output, class_info=ClassInfo( self_var="self", cls=TestClass, name="TestClass", docstring=getattr(TestClass, "__doc__", None), ), ) assert_equal( output, [ "@overload", "def __init__(self, arg0: str) -> None: ...", "@overload", "def __init__(self, arg0: str, arg1: str) -> None: ...", ], ) assert_equal(gen.get_imports().splitlines(), ["from typing import overload"]) class ArgSigSuite(unittest.TestCase): def test_repr(self) -> None: assert_equal( repr(ArgSig(name='asd"dsa')), "ArgSig(name='asd\"dsa', type=None, default=False)" ) assert_equal( repr(ArgSig(name="asd'dsa")), 'ArgSig(name="asd\'dsa", type=None, default=False)' ) assert_equal(repr(ArgSig("func", "str")), "ArgSig(name='func', type='str', default=False)") assert_equal( repr(ArgSig("func", "str", default=True)), "ArgSig(name='func', type='str', default=True)", ) class IsValidTypeSuite(unittest.TestCase): def 
test_is_valid_type(self) -> None: assert is_valid_type("int") assert is_valid_type("str") assert is_valid_type("Foo_Bar234") assert is_valid_type("foo.bar") assert is_valid_type("List[int]") assert is_valid_type("Dict[str, int]") assert is_valid_type("None") assert is_valid_type("Literal[26]") assert is_valid_type("Literal[0x1A]") assert is_valid_type('Literal["hello world"]') assert is_valid_type('Literal[b"hello world"]') assert is_valid_type('Literal[u"hello world"]') assert is_valid_type("Literal[True]") assert is_valid_type("Literal[Color.RED]") assert is_valid_type("Literal[None]") assert is_valid_type( 'Literal[26, 0x1A, "hello world", b"hello world", u"hello world", True, Color.RED, None]' ) assert not is_valid_type("foo-bar") assert not is_valid_type("x->y") assert not is_valid_type("True") assert not is_valid_type("False") assert not is_valid_type("x,y") assert not is_valid_type("x, y") class ModuleInspectSuite(unittest.TestCase): def test_python_module(self) -> None: with ModuleInspect() as m: p = m.get_package_properties("inspect") assert p is not None assert p.name == "inspect" assert p.file assert p.path is None assert p.is_c_module is False assert p.subpackages == [] def test_python_package(self) -> None: with ModuleInspect() as m: p = m.get_package_properties("unittest") assert p is not None assert p.name == "unittest" assert p.file assert p.path assert p.is_c_module is False assert p.subpackages assert all(sub.startswith("unittest.") for sub in p.subpackages) def test_c_module(self) -> None: with ModuleInspect() as m: p = m.get_package_properties("_socket") assert p is not None assert p.name == "_socket" assert p.path is None assert p.is_c_module is True assert p.subpackages == [] def test_non_existent(self) -> None: with ModuleInspect() as m: with self.assertRaises(InspectError) as e: m.get_package_properties("foobar-non-existent") assert str(e.exception) == "No module named 'foobar-non-existent'" def module_to_path(out_dir: str, module: str) -> str: fnam = os.path.join(out_dir, f"{module.replace('.', '/')}.pyi") if not os.path.exists(fnam): alt_fnam = fnam.replace(".pyi", "/__init__.pyi") if os.path.exists(alt_fnam): return alt_fnam return fnam
algorandfoundation/puya
src/puyapy/_vendor/mypy/test/teststubgen.py
Python
NOASSERTION
53,668
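A toy version of the "func(arg[, opt])" shape that assert_parse_signature exercises above, assuming required arguments precede the first '[' and optional ones sit inside the brackets. parse_sig here is a hypothetical illustration only; the real mypy.stubdoc.parse_signature also handles defaults, keyword-only markers and star args.

import re

def parse_sig(sig: str) -> tuple[str, list[str], list[str]] | None:
    # Accept an optional "Class." qualifier, then name(argstring).
    m = re.match(r"(?:\w+\.)*(\w+)\((.*)\)$", sig)
    if m is None:
        return None
    name, argstr = m.groups()
    head, _, tail = argstr.partition("[")  # required args sit before the first '['
    required = [a.strip() for a in head.split(",") if a.strip()]
    optional = [a.strip(" ,[]") for a in tail.split(",") if a.strip(" ,[]")]
    return name, required, optional

assert parse_sig("func()") == ("func", [], [])
assert parse_sig("ClassName.func(arg[, arg2])") == ("func", ["arg"], ["arg2"])
assert parse_sig("func([arg[, arg2]])") == ("func", [], ["arg", "arg2"])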
from __future__ import annotations import unittest from mypy.stubinfo import ( approved_stub_package_exists, is_module_from_legacy_bundled_package, legacy_bundled_packages, non_bundled_packages_flat, stub_distribution_name, ) class TestStubInfo(unittest.TestCase): def test_is_legacy_bundled_packages(self) -> None: assert not is_module_from_legacy_bundled_package("foobar_asdf") assert not is_module_from_legacy_bundled_package("PIL") assert is_module_from_legacy_bundled_package("pycurl") assert is_module_from_legacy_bundled_package("dataclasses") def test_approved_stub_package_exists(self) -> None: assert not approved_stub_package_exists("foobar_asdf") assert approved_stub_package_exists("pycurl") assert approved_stub_package_exists("babel") assert approved_stub_package_exists("google.cloud.ndb") assert approved_stub_package_exists("google.cloud.ndb.submodule") assert not approved_stub_package_exists("google.cloud.unknown") assert approved_stub_package_exists("google.protobuf") assert approved_stub_package_exists("google.protobuf.submodule") assert not approved_stub_package_exists("google") def test_stub_distribution_name(self) -> None: assert stub_distribution_name("foobar_asdf") is None assert stub_distribution_name("pycurl") == "types-pycurl" assert stub_distribution_name("babel") == "types-babel" assert stub_distribution_name("google.cloud.ndb") == "types-google-cloud-ndb" assert stub_distribution_name("google.cloud.ndb.submodule") == "types-google-cloud-ndb" assert stub_distribution_name("google.cloud.unknown") is None assert stub_distribution_name("google.protobuf") == "types-protobuf" assert stub_distribution_name("google.protobuf.submodule") == "types-protobuf" assert stub_distribution_name("google") is None def test_period_in_top_level(self) -> None: for packages in (non_bundled_packages_flat, legacy_bundled_packages): for top_level_module in packages: assert "." not in top_level_module
algorandfoundation/puya
src/puyapy/_vendor/mypy/test/teststubinfo.py
Python
NOASSERTION
2,182
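The expectations above amount to a longest-prefix lookup: a module resolves to a stub distribution if it, or one of its ancestor packages, has an entry, while a bare ancestor such as "google" does not. A self-contained sketch of that idea, with a tiny hypothetical table standing in for the real tables in mypy.stubinfo:

PACKAGES = {
    "pycurl": "types-pycurl",
    "google.cloud.ndb": "types-google-cloud-ndb",
    "google.protobuf": "types-protobuf",
}

def distribution_for(module: str) -> str | None:
    parts = module.split(".")
    for i in range(len(parts), 0, -1):  # try the longest prefix first
        dist = PACKAGES.get(".".join(parts[:i]))
        if dist is not None:
            return dist
    return None

assert distribution_for("pycurl") == "types-pycurl"
assert distribution_for("google.cloud.ndb.submodule") == "types-google-cloud-ndb"
assert distribution_for("google") is None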
from __future__ import annotations import contextlib import inspect import io import os import re import sys import tempfile import textwrap import unittest from typing import Any, Callable, Iterator import mypy.stubtest from mypy.stubtest import parse_options, test_stubs from mypy.test.data import root_dir @contextlib.contextmanager def use_tmp_dir(mod_name: str) -> Iterator[str]: current = os.getcwd() current_syspath = sys.path.copy() with tempfile.TemporaryDirectory() as tmp: try: os.chdir(tmp) if sys.path[0] != tmp: sys.path.insert(0, tmp) yield tmp finally: sys.path = current_syspath.copy() if mod_name in sys.modules: del sys.modules[mod_name] os.chdir(current) TEST_MODULE_NAME = "test_module" stubtest_typing_stub = """ Any = object() class _SpecialForm: def __getitem__(self, typeargs: Any) -> object: ... Callable: _SpecialForm = ... Generic: _SpecialForm = ... Protocol: _SpecialForm = ... Union: _SpecialForm = ... class TypeVar: def __init__(self, name, covariant: bool = ..., contravariant: bool = ...) -> None: ... class ParamSpec: def __init__(self, name: str) -> None: ... AnyStr = TypeVar("AnyStr", str, bytes) _T = TypeVar("_T") _T_co = TypeVar("_T_co", covariant=True) _K = TypeVar("_K") _V = TypeVar("_V") _S = TypeVar("_S", contravariant=True) _R = TypeVar("_R", covariant=True) class Coroutine(Generic[_T_co, _S, _R]): ... class Iterable(Generic[_T_co]): ... class Iterator(Iterable[_T_co]): ... class Mapping(Generic[_K, _V]): ... class Match(Generic[AnyStr]): ... class Sequence(Iterable[_T_co]): ... class Tuple(Sequence[_T_co]): ... class NamedTuple(tuple[Any, ...]): ... def overload(func: _T) -> _T: ... def type_check_only(func: _T) -> _T: ... def final(func: _T) -> _T: ... """ stubtest_builtins_stub = """ from typing import Generic, Mapping, Sequence, TypeVar, overload T = TypeVar('T') T_co = TypeVar('T_co', covariant=True) KT = TypeVar('KT') VT = TypeVar('VT') class object: __module__: str def __init__(self) -> None: pass def __repr__(self) -> str: pass class type: ... class tuple(Sequence[T_co], Generic[T_co]): def __ge__(self, __other: tuple[T_co, ...]) -> bool: pass class dict(Mapping[KT, VT]): ... class function: pass class ellipsis: pass class int: ... class float: ... class bool(int): ... class str: ... class bytes: ... class list(Sequence[T]): ... def property(f: T) -> T: ... def classmethod(f: T) -> T: ... def staticmethod(f: T) -> T: ... 
""" stubtest_enum_stub = """ import sys from typing import Any, TypeVar, Iterator _T = TypeVar('_T') class EnumMeta(type): def __len__(self) -> int: pass def __iter__(self: type[_T]) -> Iterator[_T]: pass def __reversed__(self: type[_T]) -> Iterator[_T]: pass def __getitem__(self: type[_T], name: str) -> _T: pass class Enum(metaclass=EnumMeta): def __new__(cls: type[_T], value: object) -> _T: pass def __repr__(self) -> str: pass def __str__(self) -> str: pass def __format__(self, format_spec: str) -> str: pass def __hash__(self) -> Any: pass def __reduce_ex__(self, proto: Any) -> Any: pass name: str value: Any class Flag(Enum): def __or__(self: _T, other: _T) -> _T: pass def __and__(self: _T, other: _T) -> _T: pass def __xor__(self: _T, other: _T) -> _T: pass def __invert__(self: _T) -> _T: pass if sys.version_info >= (3, 11): __ror__ = __or__ __rand__ = __and__ __rxor__ = __xor__ """ def run_stubtest_with_stderr( stub: str, runtime: str, options: list[str], config_file: str | None = None ) -> tuple[str, str]: with use_tmp_dir(TEST_MODULE_NAME) as tmp_dir: with open("builtins.pyi", "w") as f: f.write(stubtest_builtins_stub) with open("typing.pyi", "w") as f: f.write(stubtest_typing_stub) with open("enum.pyi", "w") as f: f.write(stubtest_enum_stub) with open(f"{TEST_MODULE_NAME}.pyi", "w") as f: f.write(stub) with open(f"{TEST_MODULE_NAME}.py", "w") as f: f.write(runtime) if config_file: with open(f"{TEST_MODULE_NAME}_config.ini", "w") as f: f.write(config_file) options = options + ["--mypy-config-file", f"{TEST_MODULE_NAME}_config.ini"] output = io.StringIO() outerr = io.StringIO() with contextlib.redirect_stdout(output), contextlib.redirect_stderr(outerr): test_stubs(parse_options([TEST_MODULE_NAME] + options), use_builtins_fixtures=True) filtered_output = remove_color_code( output.getvalue() # remove cwd as it's not available from outside .replace(os.path.realpath(tmp_dir) + os.sep, "").replace(tmp_dir + os.sep, "") ) filtered_outerr = remove_color_code( outerr.getvalue() # remove cwd as it's not available from outside .replace(os.path.realpath(tmp_dir) + os.sep, "").replace(tmp_dir + os.sep, "") ) return filtered_output, filtered_outerr def run_stubtest( stub: str, runtime: str, options: list[str], config_file: str | None = None ) -> str: return run_stubtest_with_stderr(stub, runtime, options, config_file)[0] class Case: def __init__(self, stub: str, runtime: str, error: str | None) -> None: self.stub = stub self.runtime = runtime self.error = error def collect_cases(fn: Callable[..., Iterator[Case]]) -> Callable[..., None]: """run_stubtest used to be slow, so we used this decorator to combine cases. If you're reading this and bored, feel free to refactor this and make it more like other mypy tests. 
""" def test(*args: Any, **kwargs: Any) -> None: cases = list(fn(*args, **kwargs)) expected_errors = set() for c in cases: if c.error is None: continue expected_error = c.error if expected_error == "": expected_error = TEST_MODULE_NAME elif not expected_error.startswith(f"{TEST_MODULE_NAME}."): expected_error = f"{TEST_MODULE_NAME}.{expected_error}" assert expected_error not in expected_errors, ( "collect_cases merges cases into a single stubtest invocation; we already " "expect an error for {}".format(expected_error) ) expected_errors.add(expected_error) output = run_stubtest( stub="\n\n".join(textwrap.dedent(c.stub.lstrip("\n")) for c in cases), runtime="\n\n".join(textwrap.dedent(c.runtime.lstrip("\n")) for c in cases), options=["--generate-allowlist"], ) actual_errors = set(output.splitlines()) if actual_errors != expected_errors: output = run_stubtest( stub="\n\n".join(textwrap.dedent(c.stub.lstrip("\n")) for c in cases), runtime="\n\n".join(textwrap.dedent(c.runtime.lstrip("\n")) for c in cases), options=[], ) assert actual_errors == expected_errors, output return test class StubtestUnit(unittest.TestCase): @collect_cases def test_basic_good(self) -> Iterator[Case]: yield Case( stub="def f(number: int, text: str) -> None: ...", runtime="def f(number, text): pass", error=None, ) yield Case( stub=""" class X: def f(self, number: int, text: str) -> None: ... """, runtime=""" class X: def f(self, number, text): pass """, error=None, ) @collect_cases def test_types(self) -> Iterator[Case]: yield Case( stub="def mistyped_class() -> None: ...", runtime="class mistyped_class: pass", error="mistyped_class", ) yield Case( stub="class mistyped_fn: ...", runtime="def mistyped_fn(): pass", error="mistyped_fn" ) yield Case( stub=""" class X: def mistyped_var(self) -> int: ... """, runtime=""" class X: mistyped_var = 1 """, error="X.mistyped_var", ) @collect_cases def test_coroutines(self) -> Iterator[Case]: yield Case(stub="def bar() -> int: ...", runtime="async def bar(): return 5", error="bar") # Don't error for this one -- we get false positives otherwise yield Case(stub="async def foo() -> int: ...", runtime="def foo(): return 5", error=None) yield Case(stub="def baz() -> int: ...", runtime="def baz(): return 5", error=None) yield Case( stub="async def bingo() -> int: ...", runtime="async def bingo(): return 5", error=None ) @collect_cases def test_arg_name(self) -> Iterator[Case]: yield Case( stub="def bad(number: int, text: str) -> None: ...", runtime="def bad(num, text) -> None: pass", error="bad", ) yield Case( stub="def good_posonly(__number: int, text: str) -> None: ...", runtime="def good_posonly(num, /, text): pass", error=None, ) yield Case( stub="def bad_posonly(__number: int, text: str) -> None: ...", runtime="def bad_posonly(flag, /, text): pass", error="bad_posonly", ) yield Case( stub=""" class BadMethod: def f(self, number: int, text: str) -> None: ... """, runtime=""" class BadMethod: def f(self, n, text): pass """, error="BadMethod.f", ) yield Case( stub=""" class GoodDunder: def __exit__(self, t, v, tb) -> None: ... 
""", runtime=""" class GoodDunder: def __exit__(self, exc_type, exc_val, exc_tb): pass """, error=None, ) @collect_cases def test_arg_kind(self) -> Iterator[Case]: yield Case( stub="def runtime_kwonly(number: int, text: str) -> None: ...", runtime="def runtime_kwonly(number, *, text): pass", error="runtime_kwonly", ) yield Case( stub="def stub_kwonly(number: int, *, text: str) -> None: ...", runtime="def stub_kwonly(number, text): pass", error="stub_kwonly", ) yield Case( stub="def stub_posonly(__number: int, text: str) -> None: ...", runtime="def stub_posonly(number, text): pass", error="stub_posonly", ) yield Case( stub="def good_posonly(__number: int, text: str) -> None: ...", runtime="def good_posonly(number, /, text): pass", error=None, ) yield Case( stub="def runtime_posonly(number: int, text: str) -> None: ...", runtime="def runtime_posonly(number, /, text): pass", error="runtime_posonly", ) yield Case( stub="def stub_posonly_570(number: int, /, text: str) -> None: ...", runtime="def stub_posonly_570(number, text): pass", error="stub_posonly_570", ) @collect_cases def test_private_parameters(self) -> Iterator[Case]: # Private parameters can optionally be omitted. yield Case( stub="def priv_pos_arg_missing() -> None: ...", runtime="def priv_pos_arg_missing(_p1=None): pass", error=None, ) yield Case( stub="def multi_priv_args() -> None: ...", runtime="def multi_priv_args(_p='', _q=''): pass", error=None, ) yield Case( stub="def priv_kwarg_missing() -> None: ...", runtime="def priv_kwarg_missing(*, _p2=''): pass", error=None, ) # But if they are included, they must be correct. yield Case( stub="def priv_pos_arg_wrong(_p: int = ...) -> None: ...", runtime="def priv_pos_arg_wrong(_p=None): pass", error="priv_pos_arg_wrong", ) yield Case( stub="def priv_kwarg_wrong(*, _p: int = ...) -> None: ...", runtime="def priv_kwarg_wrong(*, _p=None): pass", error="priv_kwarg_wrong", ) # Private parameters must have a default and start with exactly one # underscore. yield Case( stub="def pos_arg_no_default() -> None: ...", runtime="def pos_arg_no_default(_np): pass", error="pos_arg_no_default", ) yield Case( stub="def kwarg_no_default() -> None: ...", runtime="def kwarg_no_default(*, _np): pass", error="kwarg_no_default", ) yield Case( stub="def double_underscore_pos_arg() -> None: ...", runtime="def double_underscore_pos_arg(__np = None): pass", error="double_underscore_pos_arg", ) yield Case( stub="def double_underscore_kwarg() -> None: ...", runtime="def double_underscore_kwarg(*, __np = None): pass", error="double_underscore_kwarg", ) # But spot parameters that are accidentally not marked kw-only and # vice-versa. yield Case( stub="def priv_arg_is_kwonly(_p=...) -> None: ...", runtime="def priv_arg_is_kwonly(*, _p=''): pass", error="priv_arg_is_kwonly", ) yield Case( stub="def priv_arg_is_positional(*, _p=...) -> None: ...", runtime="def priv_arg_is_positional(_p=''): pass", error="priv_arg_is_positional", ) # Private parameters not at the end of the parameter list must be # included so that users can pass the following arguments using # positional syntax. yield Case( stub="def priv_args_not_at_end(*, q='') -> None: ...", runtime="def priv_args_not_at_end(_p='', q=''): pass", error="priv_args_not_at_end", ) @collect_cases def test_default_presence(self) -> Iterator[Case]: yield Case( stub="def f1(text: str = ...) -> None: ...", runtime="def f1(text = 'asdf'): pass", error=None, ) yield Case( stub="def f2(text: str = ...) 
-> None: ...", runtime="def f2(text): pass", error="f2" ) yield Case( stub="def f3(text: str) -> None: ...", runtime="def f3(text = 'asdf'): pass", error="f3", ) yield Case( stub="def f4(text: str = ...) -> None: ...", runtime="def f4(text = None): pass", error="f4", ) yield Case( stub="def f5(data: bytes = ...) -> None: ...", runtime="def f5(data = 'asdf'): pass", error="f5", ) yield Case( stub=""" from typing import TypeVar _T = TypeVar("_T", bound=str) def f6(text: _T = ...) -> None: ... """, runtime="def f6(text = None): pass", error="f6", ) @collect_cases def test_default_value(self) -> Iterator[Case]: yield Case( stub="def f1(text: str = 'x') -> None: ...", runtime="def f1(text = 'y'): pass", error="f1", ) yield Case( stub='def f2(text: bytes = b"x\'") -> None: ...', runtime='def f2(text = b"x\'"): pass', error=None, ) yield Case( stub='def f3(text: bytes = b"y\'") -> None: ...', runtime='def f3(text = b"x\'"): pass', error="f3", ) yield Case( stub="def f4(text: object = 1) -> None: ...", runtime="def f4(text = 1.0): pass", error="f4", ) yield Case( stub="def f5(text: object = True) -> None: ...", runtime="def f5(text = 1): pass", error="f5", ) yield Case( stub="def f6(text: object = True) -> None: ...", runtime="def f6(text = True): pass", error=None, ) yield Case( stub="def f7(text: object = not True) -> None: ...", runtime="def f7(text = False): pass", error=None, ) yield Case( stub="def f8(text: object = not True) -> None: ...", runtime="def f8(text = True): pass", error="f8", ) yield Case( stub="def f9(text: object = {1: 2}) -> None: ...", runtime="def f9(text = {1: 3}): pass", error="f9", ) yield Case( stub="def f10(text: object = [1, 2]) -> None: ...", runtime="def f10(text = [1, 2]): pass", error=None, ) # Simulate "<unrepresentable>" yield Case( stub="def f11() -> None: ...", runtime=""" def f11(text=None) -> None: pass f11.__text_signature__ = "(text=<unrepresentable>)" """, error="f11", ) @collect_cases def test_static_class_method(self) -> Iterator[Case]: yield Case( stub=""" class Good: @classmethod def f(cls, number: int, text: str) -> None: ... """, runtime=""" class Good: @classmethod def f(cls, number, text): pass """, error=None, ) yield Case( stub=""" class Bad1: def f(cls, number: int, text: str) -> None: ... """, runtime=""" class Bad1: @classmethod def f(cls, number, text): pass """, error="Bad1.f", ) yield Case( stub=""" class Bad2: @classmethod def f(cls, number: int, text: str) -> None: ... """, runtime=""" class Bad2: @staticmethod def f(self, number, text): pass """, error="Bad2.f", ) yield Case( stub=""" class Bad3: @staticmethod def f(cls, number: int, text: str) -> None: ... """, runtime=""" class Bad3: @classmethod def f(self, number, text): pass """, error="Bad3.f", ) yield Case( stub=""" class GoodNew: def __new__(cls, *args, **kwargs): ... 
""", runtime=""" class GoodNew: def __new__(cls, *args, **kwargs): pass """, error=None, ) @collect_cases def test_arg_mismatch(self) -> Iterator[Case]: yield Case( stub="def f1(a, *, b, c) -> None: ...", runtime="def f1(a, *, b, c): pass", error=None ) yield Case( stub="def f2(a, *, b) -> None: ...", runtime="def f2(a, *, b, c): pass", error="f2" ) yield Case( stub="def f3(a, *, b, c) -> None: ...", runtime="def f3(a, *, b): pass", error="f3" ) yield Case( stub="def f4(a, *, b, c) -> None: ...", runtime="def f4(a, b, *, c): pass", error="f4" ) yield Case( stub="def f5(a, b, *, c) -> None: ...", runtime="def f5(a, *, b, c): pass", error="f5" ) @collect_cases def test_varargs_varkwargs(self) -> Iterator[Case]: yield Case( stub="def f1(*args, **kwargs) -> None: ...", runtime="def f1(*args, **kwargs): pass", error=None, ) yield Case( stub="def f2(*args, **kwargs) -> None: ...", runtime="def f2(**kwargs): pass", error="f2", ) yield Case( stub="def g1(a, b, c, d) -> None: ...", runtime="def g1(a, *args): pass", error=None ) yield Case( stub="def g2(a, b, c, d, *args) -> None: ...", runtime="def g2(a): pass", error="g2" ) yield Case( stub="def g3(a, b, c, d, *args) -> None: ...", runtime="def g3(a, *args): pass", error=None, ) yield Case( stub="def h1(a) -> None: ...", runtime="def h1(a, b, c, d, *args): pass", error="h1" ) yield Case( stub="def h2(a, *args) -> None: ...", runtime="def h2(a, b, c, d): pass", error="h2" ) yield Case( stub="def h3(a, *args) -> None: ...", runtime="def h3(a, b, c, d, *args): pass", error="h3", ) yield Case( stub="def j1(a: int, *args) -> None: ...", runtime="def j1(a): pass", error="j1" ) yield Case( stub="def j2(a: int) -> None: ...", runtime="def j2(a, *args): pass", error="j2" ) yield Case( stub="def j3(a, b, c) -> None: ...", runtime="def j3(a, *args, c): pass", error="j3" ) yield Case(stub="def k1(a, **kwargs) -> None: ...", runtime="def k1(a): pass", error="k1") yield Case( # In theory an error, but led to worse results in practice stub="def k2(a) -> None: ...", runtime="def k2(a, **kwargs): pass", error=None, ) yield Case( stub="def k3(a, b) -> None: ...", runtime="def k3(a, **kwargs): pass", error="k3" ) yield Case( stub="def k4(a, *, b) -> None: ...", runtime="def k4(a, **kwargs): pass", error=None ) yield Case( stub="def k5(a, *, b) -> None: ...", runtime="def k5(a, *, b, c, **kwargs): pass", error="k5", ) yield Case( stub="def k6(a, *, b, **kwargs) -> None: ...", runtime="def k6(a, *, b, c, **kwargs): pass", error="k6", ) @collect_cases def test_overload(self) -> Iterator[Case]: yield Case( stub=""" from typing import overload @overload def f1(a: int, *, c: int = ...) -> int: ... @overload def f1(a: int, b: int, c: int = ...) -> str: ... """, runtime="def f1(a, b = 0, c = 0): pass", error=None, ) yield Case( stub=""" @overload def f2(a: int, *, c: int = ...) -> int: ... @overload def f2(a: int, b: int, c: int = ...) -> str: ... """, runtime="def f2(a, b, c = 0): pass", error="f2", ) yield Case( stub=""" @overload def f3(a: int) -> int: ... @overload def f3(a: int, b: str) -> str: ... """, runtime="def f3(a, b = None): pass", error="f3", ) yield Case( stub=""" @overload def f4(a: int, *args, b: int, **kwargs) -> int: ... @overload def f4(a: str, *args, b: int, **kwargs) -> str: ... """, runtime="def f4(a, *args, b, **kwargs): pass", error=None, ) yield Case( stub=""" @overload def f5(__a: int) -> int: ... @overload def f5(__b: str) -> str: ... 
""", runtime="def f5(x, /): pass", error=None, ) yield Case( stub=""" from typing import final from typing_extensions import deprecated class Foo: @overload @final def f6(self, __a: int) -> int: ... @overload @deprecated("evil") def f6(self, __b: str) -> str: ... """, runtime=""" class Foo: def f6(self, x, /): pass """, error=None, ) yield Case( stub=""" @overload def f7(a: int, /) -> int: ... @overload def f7(b: str, /) -> str: ... """, runtime="def f7(x, /): pass", error=None, ) yield Case( stub=""" @overload def f8(a: int, c: int = 0, /) -> int: ... @overload def f8(b: str, d: int, /) -> str: ... """, runtime="def f8(x, y, /): pass", error="f8", ) yield Case( stub=""" @overload def f9(a: int, c: int = 0, /) -> int: ... @overload def f9(b: str, d: int, /) -> str: ... """, runtime="def f9(x, y=0, /): pass", error=None, ) yield Case( stub=""" class Bar: @overload def f1(self) -> int: ... @overload def f1(self, a: int, /) -> int: ... @overload def f2(self, a: int, /) -> int: ... @overload def f2(self, a: str, /) -> int: ... """, runtime=""" class Bar: def f1(self, *a) -> int: ... def f2(self, *a) -> int: ... """, error=None, ) @collect_cases def test_property(self) -> Iterator[Case]: yield Case( stub=""" class Good: @property def read_only_attr(self) -> int: ... """, runtime=""" class Good: @property def read_only_attr(self): return 1 """, error=None, ) yield Case( stub=""" class Bad: @property def f(self) -> int: ... """, runtime=""" class Bad: def f(self) -> int: return 1 """, error="Bad.f", ) yield Case( stub=""" class GoodReadOnly: @property def f(self) -> int: ... """, runtime=""" class GoodReadOnly: f = 1 """, error=None, ) yield Case( stub=""" class BadReadOnly: @property def f(self) -> str: ... """, runtime=""" class BadReadOnly: f = 1 """, error="BadReadOnly.f", ) yield Case( stub=""" class Y: @property def read_only_attr(self) -> int: ... @read_only_attr.setter def read_only_attr(self, val: int) -> None: ... """, runtime=""" class Y: @property def read_only_attr(self): return 5 """, error="Y.read_only_attr", ) yield Case( stub=""" class Z: @property def read_write_attr(self) -> int: ... @read_write_attr.setter def read_write_attr(self, val: int) -> None: ... """, runtime=""" class Z: @property def read_write_attr(self): return self._val @read_write_attr.setter def read_write_attr(self, val): self._val = val """, error=None, ) yield Case( stub=""" class FineAndDandy: @property def attr(self) -> int: ... """, runtime=""" class _EvilDescriptor: def __get__(self, instance, ownerclass=None): if instance is None: raise AttributeError('no') return 42 def __set__(self, instance, value): raise AttributeError('no') class FineAndDandy: attr = _EvilDescriptor() """, error=None, ) @collect_cases def test_cached_property(self) -> Iterator[Case]: yield Case( stub=""" from functools import cached_property class Good: @cached_property def read_only_attr(self) -> int: ... @cached_property def read_only_attr2(self) -> int: ... """, runtime=""" import functools as ft from functools import cached_property class Good: @cached_property def read_only_attr(self): return 1 @ft.cached_property def read_only_attr2(self): return 1 """, error=None, ) yield Case( stub=""" from functools import cached_property class Bad: @cached_property def f(self) -> int: ... """, runtime=""" class Bad: def f(self) -> int: return 1 """, error="Bad.f", ) yield Case( stub=""" from functools import cached_property class GoodCachedAttr: @cached_property def f(self) -> int: ... 
""", runtime=""" class GoodCachedAttr: f = 1 """, error=None, ) yield Case( stub=""" from functools import cached_property class BadCachedAttr: @cached_property def f(self) -> str: ... """, runtime=""" class BadCachedAttr: f = 1 """, error="BadCachedAttr.f", ) yield Case( stub=""" from functools import cached_property from typing import final class FinalGood: @cached_property @final def attr(self) -> int: ... """, runtime=""" from functools import cached_property from typing import final class FinalGood: @cached_property @final def attr(self): return 1 """, error=None, ) yield Case( stub=""" from functools import cached_property class FinalBad: @cached_property def attr(self) -> int: ... """, runtime=""" from functools import cached_property from typing_extensions import final class FinalBad: @cached_property @final def attr(self): return 1 """, error="FinalBad.attr", ) @collect_cases def test_var(self) -> Iterator[Case]: yield Case(stub="x1: int", runtime="x1 = 5", error=None) yield Case(stub="x2: str", runtime="x2 = 5", error="x2") yield Case("from typing import Tuple", "", None) # dummy case yield Case( stub=""" x3: Tuple[int, int] """, runtime="x3 = (1, 3)", error=None, ) yield Case( stub=""" x4: Tuple[int, int] """, runtime="x4 = (1, 3, 5)", error="x4", ) yield Case(stub="x5: int", runtime="def x5(a, b): pass", error="x5") yield Case( stub="def foo(a: int, b: int) -> None: ...\nx6 = foo", runtime="def foo(a, b): pass\ndef x6(c, d): pass", error="x6", ) yield Case( stub=""" class X: f: int """, runtime=""" class X: def __init__(self): self.f = "asdf" """, error=None, ) yield Case( stub=""" class Y: read_only_attr: int """, runtime=""" class Y: @property def read_only_attr(self): return 5 """, error="Y.read_only_attr", ) yield Case( stub=""" class Z: read_write_attr: int """, runtime=""" class Z: @property def read_write_attr(self): return self._val @read_write_attr.setter def read_write_attr(self, val): self._val = val """, error=None, ) @collect_cases def test_type_alias(self) -> Iterator[Case]: yield Case( stub=""" import collections.abc import re import typing from typing import Callable, Dict, Generic, Iterable, List, Match, Tuple, TypeVar, Union """, runtime=""" import collections.abc import re from typing import Callable, Dict, Generic, Iterable, List, Match, Tuple, TypeVar, Union """, error=None, ) yield Case( stub=""" class X: def f(self) -> None: ... Y = X """, runtime=""" class X: def f(self) -> None: ... class Y: ... """, error="Y.f", ) yield Case(stub="A = Tuple[int, str]", runtime="A = (int, str)", error="A") # Error if an alias isn't present at runtime... yield Case(stub="B = str", runtime="", error="B") # ... but only if the alias isn't private yield Case(stub="_C = int", runtime="", error=None) yield Case( stub=""" D = tuple[str, str] E = Tuple[int, int, int] F = Tuple[str, int] """, runtime=""" D = Tuple[str, str] E = Tuple[int, int, int] F = List[str] """, error="F", ) yield Case( stub=""" G = str | int H = Union[str, bool] I = str | int """, runtime=""" G = Union[str, int] H = Union[str, bool] I = str """, error="I", ) yield Case( stub=""" K = dict[str, str] L = Dict[int, int] KK = collections.abc.Iterable[str] LL = typing.Iterable[str] """, runtime=""" K = Dict[str, str] L = Dict[int, int] KK = Iterable[str] LL = Iterable[str] """, error=None, ) yield Case( stub=""" _T = TypeVar("_T") class _Spam(Generic[_T]): def foo(self) -> None: ... 
IntFood = _Spam[int] """, runtime=""" _T = TypeVar("_T") class _Bacon(Generic[_T]): def foo(self, arg): pass IntFood = _Bacon[int] """, error="IntFood.foo", ) yield Case(stub="StrList = list[str]", runtime="StrList = ['foo', 'bar']", error="StrList") yield Case( stub=""" N = typing.Callable[[str], bool] O = collections.abc.Callable[[int], str] P = typing.Callable[[str], bool] """, runtime=""" N = Callable[[str], bool] O = Callable[[int], str] P = int """, error="P", ) yield Case( stub=""" class Foo: class Bar: ... BarAlias = Foo.Bar """, runtime=""" class Foo: class Bar: pass BarAlias = Foo.Bar """, error=None, ) yield Case( stub=""" from io import StringIO StringIOAlias = StringIO """, runtime=""" from _io import StringIO StringIOAlias = StringIO """, error=None, ) yield Case(stub="M = Match[str]", runtime="M = Match[str]", error=None) yield Case( stub=""" class Baz: def fizz(self) -> None: ... BazAlias = Baz """, runtime=""" class Baz: def fizz(self): pass BazAlias = Baz Baz.__name__ = Baz.__qualname__ = Baz.__module__ = "New" """, error=None, ) yield Case( stub=""" class FooBar: __module__: None # type: ignore def fizz(self) -> None: ... FooBarAlias = FooBar """, runtime=""" class FooBar: def fizz(self): pass FooBarAlias = FooBar FooBar.__module__ = None """, error=None, ) if sys.version_info >= (3, 10): yield Case( stub=""" Q = Dict[str, str] R = dict[int, int] S = Tuple[int, int] T = tuple[str, str] U = int | str V = Union[int, str] W = typing.Callable[[str], bool] Z = collections.abc.Callable[[str], bool] QQ = typing.Iterable[str] RR = collections.abc.Iterable[str] MM = typing.Match[str] MMM = re.Match[str] """, runtime=""" Q = dict[str, str] R = dict[int, int] S = tuple[int, int] T = tuple[str, str] U = int | str V = int | str W = collections.abc.Callable[[str], bool] Z = collections.abc.Callable[[str], bool] QQ = collections.abc.Iterable[str] RR = collections.abc.Iterable[str] MM = re.Match[str] MMM = re.Match[str] """, error=None, ) @collect_cases def test_enum(self) -> Iterator[Case]: yield Case(stub="import enum", runtime="import enum", error=None) yield Case( stub=""" class X(enum.Enum): a: int b: str c: str """, runtime=""" class X(enum.Enum): a = 1 b = "asdf" c = 2 """, error="X.c", ) yield Case( stub=""" class Flags1(enum.Flag): a: int b: int def foo(x: Flags1 = ...) -> None: ... """, runtime=""" class Flags1(enum.Flag): a = 1 b = 2 def foo(x=Flags1.a|Flags1.b): pass """, error=None, ) yield Case( stub=""" class Flags2(enum.Flag): a: int b: int def bar(x: Flags2 | None = None) -> None: ... """, runtime=""" class Flags2(enum.Flag): a = 1 b = 2 def bar(x=Flags2.a|Flags2.b): pass """, error="bar", ) yield Case( stub=""" class Flags3(enum.Flag): a: int b: int def baz(x: Flags3 | None = ...) -> None: ... """, runtime=""" class Flags3(enum.Flag): a = 1 b = 2 def baz(x=Flags3(0)): pass """, error=None, ) yield Case( runtime=""" import enum class SomeObject: ... class WeirdEnum(enum.Enum): a = SomeObject() b = SomeObject() """, stub=""" import enum class SomeObject: ... class WeirdEnum(enum.Enum): _value_: SomeObject a = ... b = ... """, error=None, ) yield Case( stub=""" class Flags4(enum.Flag): a: int b: int def spam(x: Flags4 | None = None) -> None: ... 
""", runtime=""" class Flags4(enum.Flag): a = 1 b = 2 def spam(x=Flags4(0)): pass """, error="spam", ) yield Case( stub=""" from typing_extensions import Final, Literal class BytesEnum(bytes, enum.Enum): a: bytes FOO: Literal[BytesEnum.a] BAR: Final = BytesEnum.a BAZ: BytesEnum EGGS: bytes """, runtime=""" class BytesEnum(bytes, enum.Enum): a = b'foo' FOO = BytesEnum.a BAR = BytesEnum.a BAZ = BytesEnum.a EGGS = BytesEnum.a """, error=None, ) @collect_cases def test_decorator(self) -> Iterator[Case]: yield Case( stub=""" from typing import Any, Callable def decorator(f: Callable[[], int]) -> Callable[..., Any]: ... @decorator def f() -> Any: ... """, runtime=""" def decorator(f): return f @decorator def f(): return 3 """, error=None, ) @collect_cases def test_all_at_runtime_not_stub(self) -> Iterator[Case]: yield Case( stub="Z: int", runtime=""" __all__ = [] Z = 5""", error=None, ) @collect_cases def test_all_in_stub_not_at_runtime(self) -> Iterator[Case]: yield Case(stub="__all__ = ()", runtime="", error="__all__") @collect_cases def test_all_in_stub_different_to_all_at_runtime(self) -> Iterator[Case]: # We *should* emit an error with the module name itself + __all__, # if the stub *does* define __all__, # but the stub's __all__ is inconsistent with the runtime's __all__ yield Case( stub=""" __all__ = ['foo'] foo: str """, runtime=""" __all__ = [] foo = 'foo' """, error="__all__", ) @collect_cases def test_missing(self) -> Iterator[Case]: yield Case(stub="x = 5", runtime="", error="x") yield Case(stub="def f(): ...", runtime="", error="f") yield Case(stub="class X: ...", runtime="", error="X") yield Case( stub=""" from typing import overload @overload def h(x: int): ... @overload def h(x: str): ... """, runtime="", error="h", ) yield Case(stub="", runtime="__all__ = []", error=None) # dummy case yield Case(stub="", runtime="__all__ += ['y']\ny = 5", error="y") yield Case(stub="", runtime="__all__ += ['g']\ndef g(): pass", error="g") # Here we should only check that runtime has B, since the stub explicitly re-exports it yield Case( stub="from mystery import A, B as B, C as D # type: ignore", runtime="", error="B" ) yield Case( stub="class Y: ...", runtime="__all__ += ['Y']\nclass Y:\n def __or__(self, other): return self|other", error="Y.__or__", ) yield Case( stub="class Z: ...", runtime="__all__ += ['Z']\nclass Z:\n def __reduce__(self): return (Z,)", error=None, ) @collect_cases def test_missing_no_runtime_all(self) -> Iterator[Case]: yield Case(stub="", runtime="import sys", error=None) yield Case(stub="", runtime="def g(): ...", error="g") yield Case(stub="", runtime="CONSTANT = 0", error="CONSTANT") yield Case(stub="", runtime="import re; constant = re.compile('foo')", error="constant") yield Case(stub="", runtime="from json.scanner import NUMBER_RE", error=None) yield Case(stub="", runtime="from string import ascii_letters", error=None) @collect_cases def test_missing_no_runtime_all_terrible(self) -> Iterator[Case]: yield Case( stub="", runtime=""" import sys import types import __future__ _m = types.SimpleNamespace() _m.annotations = __future__.annotations sys.modules["_terrible_stubtest_test_module"] = _m from _terrible_stubtest_test_module import * assert annotations """, error=None, ) @collect_cases def test_non_public_1(self) -> Iterator[Case]: yield Case( stub="__all__: list[str]", runtime="", error=f"{TEST_MODULE_NAME}.__all__" ) # dummy case yield Case(stub="_f: int", runtime="def _f(): ...", error="_f") @collect_cases def test_non_public_2(self) -> Iterator[Case]: yield 
Case(stub="__all__: list[str] = ['f']", runtime="__all__ = ['f']", error=None) yield Case(stub="f: int", runtime="def f(): ...", error="f") yield Case(stub="g: int", runtime="def g(): ...", error="g") @collect_cases def test_dunders(self) -> Iterator[Case]: yield Case( stub="class A:\n def __init__(self, a: int, b: int) -> None: ...", runtime="class A:\n def __init__(self, a, bx): pass", error="A.__init__", ) yield Case( stub="class B:\n def __call__(self, c: int, d: int) -> None: ...", runtime="class B:\n def __call__(self, c, dx): pass", error="B.__call__", ) yield Case( stub=( "class C:\n" " def __init_subclass__(\n" " cls, e: int = ..., **kwargs: int\n" " ) -> None: ...\n" ), runtime="class C:\n def __init_subclass__(cls, e=1, **kwargs): pass", error=None, ) if sys.version_info >= (3, 9): yield Case( stub="class D:\n def __class_getitem__(cls, type: type) -> type: ...", runtime="class D:\n def __class_getitem__(cls, type): ...", error=None, ) @collect_cases def test_not_subclassable(self) -> Iterator[Case]: yield Case( stub="class CanBeSubclassed: ...", runtime="class CanBeSubclassed: ...", error=None ) yield Case( stub="class CannotBeSubclassed:\n def __init_subclass__(cls) -> None: ...", runtime="class CannotBeSubclassed:\n def __init_subclass__(cls): raise TypeError", error="CannotBeSubclassed", ) @collect_cases def test_has_runtime_final_decorator(self) -> Iterator[Case]: yield Case( stub="from typing_extensions import final", runtime=""" import functools from typing_extensions import final """, error=None, ) yield Case( stub=""" @final class A: ... """, runtime=""" @final class A: ... """, error=None, ) yield Case( # Runtime can miss `@final` decorator stub=""" @final class B: ... """, runtime=""" class B: ... """, error=None, ) yield Case( # Stub cannot miss `@final` decorator stub=""" class C: ... """, runtime=""" @final class C: ... """, error="C", ) yield Case( stub=""" class D: @final def foo(self) -> None: ... @final @staticmethod def bar() -> None: ... @staticmethod @final def bar2() -> None: ... @final @classmethod def baz(cls) -> None: ... @classmethod @final def baz2(cls) -> None: ... @property @final def eggs(self) -> int: ... @final @property def eggs2(self) -> int: ... @final def ham(self, obj: int) -> int: ... """, runtime=""" class D: @final def foo(self): pass @final @staticmethod def bar(): pass @staticmethod @final def bar2(): pass @final @classmethod def baz(cls): pass @classmethod @final def baz2(cls): pass @property @final def eggs(self): return 42 @final @property def eggs2(self): pass @final @functools.lru_cache() def ham(self, obj): return obj * 2 """, error=None, ) # Stub methods are allowed to have @final even if the runtime doesn't... yield Case( stub=""" class E: @final def foo(self) -> None: ... @final @staticmethod def bar() -> None: ... @staticmethod @final def bar2() -> None: ... @final @classmethod def baz(cls) -> None: ... @classmethod @final def baz2(cls) -> None: ... @property @final def eggs(self) -> int: ... @final @property def eggs2(self) -> int: ... @final def ham(self, obj: int) -> int: ... """, runtime=""" class E: def foo(self): pass @staticmethod def bar(): pass @staticmethod def bar2(): pass @classmethod def baz(cls): pass @classmethod def baz2(cls): pass @property def eggs(self): return 42 @property def eggs2(self): return 42 @functools.lru_cache() def ham(self, obj): return obj * 2 """, error=None, ) # ...But if the runtime has @final, the stub must have it as well yield Case( stub=""" class F: def foo(self) -> None: ... 
""", runtime=""" class F: @final def foo(self): pass """, error="F.foo", ) yield Case( stub=""" class G: @staticmethod def foo() -> None: ... """, runtime=""" class G: @final @staticmethod def foo(): pass """, error="G.foo", ) yield Case( stub=""" class H: @staticmethod def foo() -> None: ... """, runtime=""" class H: @staticmethod @final def foo(): pass """, error="H.foo", ) yield Case( stub=""" class I: @classmethod def foo(cls) -> None: ... """, runtime=""" class I: @final @classmethod def foo(cls): pass """, error="I.foo", ) yield Case( stub=""" class J: @classmethod def foo(cls) -> None: ... """, runtime=""" class J: @classmethod @final def foo(cls): pass """, error="J.foo", ) yield Case( stub=""" class K: @property def foo(self) -> int: ... """, runtime=""" class K: @property @final def foo(self): return 42 """, error="K.foo", ) # This test wouldn't pass, # because the runtime can't set __final__ on instances of builtins.property, # so stubtest has non way of knowing that the runtime was decorated with @final: # # yield Case( # stub=""" # class K2: # @property # def foo(self) -> int: ... # """, # runtime=""" # class K2: # @final # @property # def foo(self): return 42 # """, # error="K2.foo", # ) yield Case( stub=""" class L: def foo(self, obj: int) -> int: ... """, runtime=""" class L: @final @functools.lru_cache() def foo(self, obj): return obj * 2 """, error="L.foo", ) @collect_cases def test_name_mangling(self) -> Iterator[Case]: yield Case( stub=""" class X: def __mangle_good(self, text: str) -> None: ... def __mangle_bad(self, number: int) -> None: ... """, runtime=""" class X: def __mangle_good(self, text): pass def __mangle_bad(self, text): pass """, error="X.__mangle_bad", ) yield Case( stub=""" class Klass: class __Mangled1: class __Mangled2: def __mangle_good(self, text: str) -> None: ... def __mangle_bad(self, number: int) -> None: ... """, runtime=""" class Klass: class __Mangled1: class __Mangled2: def __mangle_good(self, text): pass def __mangle_bad(self, text): pass """, error="Klass.__Mangled1.__Mangled2.__mangle_bad", ) yield Case( stub=""" class __Dunder__: def __mangle_good(self, text: str) -> None: ... def __mangle_bad(self, number: int) -> None: ... """, runtime=""" class __Dunder__: def __mangle_good(self, text): pass def __mangle_bad(self, text): pass """, error="__Dunder__.__mangle_bad", ) yield Case( stub=""" class _Private: def __mangle_good(self, text: str) -> None: ... def __mangle_bad(self, number: int) -> None: ... """, runtime=""" class _Private: def __mangle_good(self, text): pass def __mangle_bad(self, text): pass """, error="_Private.__mangle_bad", ) @collect_cases def test_mro(self) -> Iterator[Case]: yield Case( stub=""" class A: def foo(self, x: int) -> None: ... class B(A): pass class C(A): pass """, runtime=""" class A: def foo(self, x: int) -> None: ... class B(A): def foo(self, x: int) -> None: ... class C(A): def foo(self, y: int) -> None: ... """, error="C.foo", ) yield Case( stub=""" class X: ... 
""", runtime=""" class X: def __init__(self, x): pass """, error="X.__init__", ) @collect_cases def test_good_literal(self) -> Iterator[Case]: yield Case( stub=r""" from typing_extensions import Literal import enum class Color(enum.Enum): RED: int NUM: Literal[1] CHAR: Literal['a'] FLAG: Literal[True] NON: Literal[None] BYT1: Literal[b'abc'] BYT2: Literal[b'\x90'] ENUM: Literal[Color.RED] """, runtime=r""" import enum class Color(enum.Enum): RED = 3 NUM = 1 CHAR = 'a' NON = None FLAG = True BYT1 = b"abc" BYT2 = b'\x90' ENUM = Color.RED """, error=None, ) @collect_cases def test_bad_literal(self) -> Iterator[Case]: yield Case("from typing_extensions import Literal", "", None) # dummy case yield Case( stub="INT_FLOAT_MISMATCH: Literal[1]", runtime="INT_FLOAT_MISMATCH = 1.0", error="INT_FLOAT_MISMATCH", ) yield Case(stub="WRONG_INT: Literal[1]", runtime="WRONG_INT = 2", error="WRONG_INT") yield Case(stub="WRONG_STR: Literal['a']", runtime="WRONG_STR = 'b'", error="WRONG_STR") yield Case( stub="BYTES_STR_MISMATCH: Literal[b'value']", runtime="BYTES_STR_MISMATCH = 'value'", error="BYTES_STR_MISMATCH", ) yield Case( stub="STR_BYTES_MISMATCH: Literal['value']", runtime="STR_BYTES_MISMATCH = b'value'", error="STR_BYTES_MISMATCH", ) yield Case( stub="WRONG_BYTES: Literal[b'abc']", runtime="WRONG_BYTES = b'xyz'", error="WRONG_BYTES", ) yield Case( stub="WRONG_BOOL_1: Literal[True]", runtime="WRONG_BOOL_1 = False", error="WRONG_BOOL_1", ) yield Case( stub="WRONG_BOOL_2: Literal[False]", runtime="WRONG_BOOL_2 = True", error="WRONG_BOOL_2", ) @collect_cases def test_special_subtype(self) -> Iterator[Case]: yield Case( stub=""" b1: bool b2: bool b3: bool """, runtime=""" b1 = 0 b2 = 1 b3 = 2 """, error="b3", ) yield Case( stub=""" from typing_extensions import TypedDict class _Options(TypedDict): a: str b: int opt1: _Options opt2: _Options opt3: _Options """, runtime=""" opt1 = {"a": "3.", "b": 14} opt2 = {"some": "stuff"} # false negative opt3 = 0 """, error="opt3", ) @collect_cases def test_runtime_typing_objects(self) -> Iterator[Case]: yield Case( stub="from typing_extensions import Protocol, TypedDict", runtime="from typing_extensions import Protocol, TypedDict", error=None, ) yield Case( stub=""" class X(Protocol): bar: int def foo(self, x: int, y: bytes = ...) -> str: ... """, runtime=""" class X(Protocol): bar: int def foo(self, x: int, y: bytes = ...) -> str: ... """, error=None, ) yield Case( stub=""" class Y(TypedDict): a: int """, runtime=""" class Y(TypedDict): a: int """, error=None, ) @collect_cases def test_named_tuple(self) -> Iterator[Case]: yield Case( stub="from typing import NamedTuple", runtime="from typing import NamedTuple", error=None, ) yield Case( stub=""" class X1(NamedTuple): bar: int foo: str = ... """, runtime=""" class X1(NamedTuple): bar: int foo: str = 'a' """, error=None, ) yield Case( stub=""" class X2(NamedTuple): bar: int foo: str """, runtime=""" class X2(NamedTuple): bar: int foo: str = 'a' """, # `__new__` will miss a default value for a `foo` parameter, # but we don't generate special errors for `foo` missing `...` part. error="X2.__new__", ) @collect_cases def test_named_tuple_typing_and_collections(self) -> Iterator[Case]: yield Case( stub="from typing import NamedTuple", runtime="from collections import namedtuple", error=None, ) yield Case( stub=""" class X1(NamedTuple): bar: int foo: str = ... 
""", runtime=""" X1 = namedtuple('X1', ['bar', 'foo'], defaults=['a']) """, error=None, ) yield Case( stub=""" class X2(NamedTuple): bar: int foo: str """, runtime=""" X2 = namedtuple('X1', ['bar', 'foo'], defaults=['a']) """, error="X2.__new__", ) @collect_cases def test_type_var(self) -> Iterator[Case]: yield Case( stub="from typing import TypeVar", runtime="from typing import TypeVar", error=None ) yield Case(stub="A = TypeVar('A')", runtime="A = TypeVar('A')", error=None) yield Case(stub="B = TypeVar('B')", runtime="B = 5", error="B") if sys.version_info >= (3, 10): yield Case( stub="from typing import ParamSpec", runtime="from typing import ParamSpec", error=None, ) yield Case(stub="C = ParamSpec('C')", runtime="C = ParamSpec('C')", error=None) @collect_cases def test_metaclass_match(self) -> Iterator[Case]: yield Case(stub="class Meta(type): ...", runtime="class Meta(type): ...", error=None) yield Case(stub="class A0: ...", runtime="class A0: ...", error=None) yield Case( stub="class A1(metaclass=Meta): ...", runtime="class A1(metaclass=Meta): ...", error=None, ) yield Case(stub="class A2: ...", runtime="class A2(metaclass=Meta): ...", error="A2") yield Case(stub="class A3(metaclass=Meta): ...", runtime="class A3: ...", error="A3") # Explicit `type` metaclass can always be added in any part: yield Case( stub="class T1(metaclass=type): ...", runtime="class T1(metaclass=type): ...", error=None, ) yield Case(stub="class T2: ...", runtime="class T2(metaclass=type): ...", error=None) yield Case(stub="class T3(metaclass=type): ...", runtime="class T3: ...", error=None) # Explicit check that `_protected` names are also supported: yield Case(stub="class _P1(type): ...", runtime="class _P1(type): ...", error=None) yield Case(stub="class P2: ...", runtime="class P2(metaclass=_P1): ...", error="P2") # With inheritance: yield Case( stub=""" class I1(metaclass=Meta): ... class S1(I1): ... """, runtime=""" class I1(metaclass=Meta): ... class S1(I1): ... """, error=None, ) yield Case( stub=""" class I2(metaclass=Meta): ... class S2: ... # missing inheritance """, runtime=""" class I2(metaclass=Meta): ... class S2(I2): ... """, error="S2", ) @collect_cases def test_metaclass_abcmeta(self) -> Iterator[Case]: # Handling abstract metaclasses is special: yield Case(stub="from abc import ABCMeta", runtime="from abc import ABCMeta", error=None) yield Case( stub="class A1(metaclass=ABCMeta): ...", runtime="class A1(metaclass=ABCMeta): ...", error=None, ) # Stubs cannot miss abstract metaclass: yield Case(stub="class A2: ...", runtime="class A2(metaclass=ABCMeta): ...", error="A2") # But, stubs can add extra abstract metaclass, this might be a typing hack: yield Case(stub="class A3(metaclass=ABCMeta): ...", runtime="class A3: ...", error=None) @collect_cases def test_abstract_methods(self) -> Iterator[Case]: yield Case( stub=""" from abc import abstractmethod from typing import overload """, runtime="from abc import abstractmethod", error=None, ) yield Case( stub=""" class A1: def some(self) -> None: ... """, runtime=""" class A1: @abstractmethod def some(self) -> None: ... """, error="A1.some", ) yield Case( stub=""" class A2: @abstractmethod def some(self) -> None: ... """, runtime=""" class A2: @abstractmethod def some(self) -> None: ... """, error=None, ) yield Case( stub=""" class A3: @overload def some(self, other: int) -> str: ... @overload def some(self, other: str) -> int: ... """, runtime=""" class A3: @abstractmethod def some(self, other) -> None: ... 
""", error="A3.some", ) yield Case( stub=""" class A4: @overload @abstractmethod def some(self, other: int) -> str: ... @overload @abstractmethod def some(self, other: str) -> int: ... """, runtime=""" class A4: @abstractmethod def some(self, other) -> None: ... """, error=None, ) yield Case( stub=""" class A5: @abstractmethod @overload def some(self, other: int) -> str: ... @abstractmethod @overload def some(self, other: str) -> int: ... """, runtime=""" class A5: @abstractmethod def some(self, other) -> None: ... """, error=None, ) # Runtime can miss `@abstractmethod`: yield Case( stub=""" class A6: @abstractmethod def some(self) -> None: ... """, runtime=""" class A6: def some(self) -> None: ... """, error=None, ) @collect_cases def test_abstract_properties(self) -> Iterator[Case]: # TODO: test abstract properties with setters yield Case( stub="from abc import abstractmethod", runtime="from abc import abstractmethod", error=None, ) # Ensure that `@property` also can be abstract: yield Case( stub=""" class AP1: @property def some(self) -> int: ... """, runtime=""" class AP1: @property @abstractmethod def some(self) -> int: ... """, error="AP1.some", ) yield Case( stub=""" class AP1_2: def some(self) -> int: ... # missing `@property` decorator """, runtime=""" class AP1_2: @property @abstractmethod def some(self) -> int: ... """, error="AP1_2.some", ) yield Case( stub=""" class AP2: @property @abstractmethod def some(self) -> int: ... """, runtime=""" class AP2: @property @abstractmethod def some(self) -> int: ... """, error=None, ) # Runtime can miss `@abstractmethod`: yield Case( stub=""" class AP3: @property @abstractmethod def some(self) -> int: ... """, runtime=""" class AP3: @property def some(self) -> int: ... """, error=None, ) @collect_cases def test_type_check_only(self) -> Iterator[Case]: yield Case( stub="from typing import type_check_only, overload", runtime="from typing import overload", error=None, ) # You can have public types that are only defined in stubs # with `@type_check_only`: yield Case( stub=""" @type_check_only class A1: ... """, runtime="", error=None, ) # Having `@type_check_only` on a type that exists at runtime is an error yield Case( stub=""" @type_check_only class A2: ... """, runtime="class A2: ...", error="A2", ) # The same is true for NamedTuples and TypedDicts: yield Case( stub="from typing_extensions import NamedTuple, TypedDict", runtime="from typing_extensions import NamedTuple, TypedDict", error=None, ) yield Case( stub=""" @type_check_only class NT1(NamedTuple): ... """, runtime="class NT1(NamedTuple): ...", error="NT1", ) yield Case( stub=""" @type_check_only class TD1(TypedDict): ... """, runtime="class TD1(TypedDict): ...", error="TD1", ) # The same is true for functions: yield Case( stub=""" @type_check_only def func1() -> None: ... """, runtime="", error=None, ) yield Case( stub=""" @type_check_only def func2() -> None: ... """, runtime="def func2() -> None: ...", error="func2", ) def remove_color_code(s: str) -> str: return re.sub("\\x1b.*?m", "", s) # this works! 
class StubtestMiscUnit(unittest.TestCase): def test_output(self) -> None: output = run_stubtest( stub="def bad(number: int, text: str) -> None: ...", runtime="def bad(num, text): pass", options=[], ) expected = ( f'error: {TEST_MODULE_NAME}.bad is inconsistent, stub argument "number" differs ' 'from runtime argument "num"\n' f"Stub: in file {TEST_MODULE_NAME}.pyi:1\n" "def (number: builtins.int, text: builtins.str)\n" f"Runtime: in file {TEST_MODULE_NAME}.py:1\ndef (num, text)\n\n" "Found 1 error (checked 1 module)\n" ) assert output == expected output = run_stubtest( stub="def bad(number: int, text: str) -> None: ...", runtime="def bad(num, text): pass", options=["--concise"], ) expected = ( "{}.bad is inconsistent, " 'stub argument "number" differs from runtime argument "num"\n'.format(TEST_MODULE_NAME) ) assert output == expected def test_ignore_flags(self) -> None: output = run_stubtest( stub="", runtime="__all__ = ['f']\ndef f(): pass", options=["--ignore-missing-stub"] ) assert output == "Success: no issues found in 1 module\n" output = run_stubtest(stub="", runtime="def f(): pass", options=["--ignore-missing-stub"]) assert output == "Success: no issues found in 1 module\n" output = run_stubtest( stub="def f(__a): ...", runtime="def f(a): pass", options=["--ignore-positional-only"] ) assert output == "Success: no issues found in 1 module\n" def test_allowlist(self) -> None: # Can't use this as a context because Windows allowlist = tempfile.NamedTemporaryFile(mode="w+", delete=False) try: with allowlist: allowlist.write(f"{TEST_MODULE_NAME}.bad # comment\n# comment") output = run_stubtest( stub="def bad(number: int, text: str) -> None: ...", runtime="def bad(asdf, text): pass", options=["--allowlist", allowlist.name], ) assert output == "Success: no issues found in 1 module\n" # test unused entry detection output = run_stubtest(stub="", runtime="", options=["--allowlist", allowlist.name]) assert output == ( f"note: unused allowlist entry {TEST_MODULE_NAME}.bad\n" "Found 1 error (checked 1 module)\n" ) output = run_stubtest( stub="", runtime="", options=["--allowlist", allowlist.name, "--ignore-unused-allowlist"], ) assert output == "Success: no issues found in 1 module\n" # test regex matching with open(allowlist.name, mode="w+") as f: f.write(f"{TEST_MODULE_NAME}.b.*\n") f.write("(unused_missing)?\n") f.write("unused.*\n") output = run_stubtest( stub=textwrap.dedent( """ def good() -> None: ... def bad(number: int) -> None: ... def also_bad(number: int) -> None: ... 
""".lstrip( "\n" ) ), runtime=textwrap.dedent( """ def good(): pass def bad(asdf): pass def also_bad(asdf): pass """.lstrip( "\n" ) ), options=["--allowlist", allowlist.name, "--generate-allowlist"], ) assert output == ( f"note: unused allowlist entry unused.*\n{TEST_MODULE_NAME}.also_bad\n" ) finally: os.unlink(allowlist.name) def test_mypy_build(self) -> None: output = run_stubtest(stub="+", runtime="", options=[]) assert output == ( "error: not checking stubs due to failed mypy compile:\n{}.pyi:1: " "error: invalid syntax [syntax]\n".format(TEST_MODULE_NAME) ) output = run_stubtest(stub="def f(): ...\ndef f(): ...", runtime="", options=[]) assert output == ( "error: not checking stubs due to mypy build errors:\n{}.pyi:2: " 'error: Name "f" already defined on line 1 [no-redef]\n'.format(TEST_MODULE_NAME) ) def test_missing_stubs(self) -> None: output = io.StringIO() with contextlib.redirect_stdout(output): test_stubs(parse_options(["not_a_module"])) assert remove_color_code(output.getvalue()) == ( "error: not_a_module failed to find stubs\n" "Stub:\nMISSING\nRuntime:\nN/A\n\n" "Found 1 error (checked 1 module)\n" ) def test_only_py(self) -> None: # in this case, stubtest will check the py against itself # this is useful to support packages with a mix of stubs and inline types with use_tmp_dir(TEST_MODULE_NAME): with open(f"{TEST_MODULE_NAME}.py", "w") as f: f.write("a = 1") output = io.StringIO() with contextlib.redirect_stdout(output): test_stubs(parse_options([TEST_MODULE_NAME])) output_str = remove_color_code(output.getvalue()) assert output_str == "Success: no issues found in 1 module\n" def test_get_typeshed_stdlib_modules(self) -> None: stdlib = mypy.stubtest.get_typeshed_stdlib_modules(None, (3, 7)) assert "builtins" in stdlib assert "os" in stdlib assert "os.path" in stdlib assert "asyncio" in stdlib assert "graphlib" not in stdlib assert "formatter" in stdlib assert "contextvars" in stdlib # 3.7+ assert "importlib.metadata" not in stdlib stdlib = mypy.stubtest.get_typeshed_stdlib_modules(None, (3, 10)) assert "graphlib" in stdlib assert "formatter" not in stdlib assert "importlib.metadata" in stdlib def test_signature(self) -> None: def f(a: int, b: int, *, c: int, d: int = 0, **kwargs: Any) -> None: pass assert ( str(mypy.stubtest.Signature.from_inspect_signature(inspect.signature(f))) == "def (a, b, *, c, d = ..., **kwargs)" ) def test_builtin_signature_with_unrepresentable_default(self) -> None: sig = mypy.stubtest.safe_inspect_signature(bytes.hex) assert sig is not None assert ( str(mypy.stubtest.Signature.from_inspect_signature(sig)) == "def (self, sep = ..., bytes_per_sep = ...)" ) def test_config_file(self) -> None: runtime = "temp = 5\n" stub = "from decimal import Decimal\ntemp: Decimal\n" config_file = f"[mypy]\nplugins={root_dir}/test-data/unit/plugins/decimal_to_int.py\n" output = run_stubtest(stub=stub, runtime=runtime, options=[]) assert output == ( f"error: {TEST_MODULE_NAME}.temp variable differs from runtime type Literal[5]\n" f"Stub: in file {TEST_MODULE_NAME}.pyi:2\n_decimal.Decimal\nRuntime:\n5\n\n" "Found 1 error (checked 1 module)\n" ) output = run_stubtest(stub=stub, runtime=runtime, options=[], config_file=config_file) assert output == "Success: no issues found in 1 module\n" def test_config_file_error_codes(self) -> None: runtime = "temp = 5\n" stub = "temp = SOME_GLOBAL_CONST" output = run_stubtest(stub=stub, runtime=runtime, options=[]) assert output == ( "error: not checking stubs due to mypy build errors:\n" 'test_module.pyi:1: error: Name 
"SOME_GLOBAL_CONST" is not defined [name-defined]\n' ) config_file = "[mypy]\ndisable_error_code = name-defined\n" output = run_stubtest(stub=stub, runtime=runtime, options=[], config_file=config_file) assert output == "Success: no issues found in 1 module\n" def test_config_file_error_codes_invalid(self) -> None: runtime = "temp = 5\n" stub = "temp: int\n" config_file = "[mypy]\ndisable_error_code = not-a-valid-name\n" output, outerr = run_stubtest_with_stderr( stub=stub, runtime=runtime, options=[], config_file=config_file ) assert output == "Success: no issues found in 1 module\n" assert outerr == ( "test_module_config.ini: [mypy]: disable_error_code: " "Invalid error code(s): not-a-valid-name\n" ) def test_config_file_wrong_incomplete_feature(self) -> None: runtime = "x = 1\n" stub = "x: int\n" config_file = "[mypy]\nenable_incomplete_feature = Unpack\n" output = run_stubtest(stub=stub, runtime=runtime, options=[], config_file=config_file) assert output == ( "warning: Warning: Unpack is already enabled by default\n" "Success: no issues found in 1 module\n" ) config_file = "[mypy]\nenable_incomplete_feature = not-a-valid-name\n" with self.assertRaises(SystemExit): run_stubtest(stub=stub, runtime=runtime, options=[], config_file=config_file) def test_no_modules(self) -> None: output = io.StringIO() with contextlib.redirect_stdout(output): test_stubs(parse_options([])) assert remove_color_code(output.getvalue()) == "error: no modules to check\n" def test_module_and_typeshed(self) -> None: output = io.StringIO() with contextlib.redirect_stdout(output): test_stubs(parse_options(["--check-typeshed", "some_module"])) assert remove_color_code(output.getvalue()) == ( "error: cannot pass both --check-typeshed and a list of modules\n" )
algorandfoundation/puya
src/puyapy/_vendor/mypy/test/teststubtest.py
Python
NOASSERTION
83,629
from __future__ import annotations from mypy.nodes import CONTRAVARIANT, COVARIANT, INVARIANT from mypy.subtypes import is_subtype from mypy.test.helpers import Suite from mypy.test.typefixture import InterfaceTypeFixture, TypeFixture from mypy.types import Instance, Type, UnpackType class SubtypingSuite(Suite): def setUp(self) -> None: self.fx = TypeFixture(INVARIANT) self.fx_contra = TypeFixture(CONTRAVARIANT) self.fx_co = TypeFixture(COVARIANT) def test_trivial_cases(self) -> None: for simple in self.fx_co.a, self.fx_co.o, self.fx_co.b: self.assert_subtype(simple, simple) def test_instance_subtyping(self) -> None: self.assert_strict_subtype(self.fx.a, self.fx.o) self.assert_strict_subtype(self.fx.b, self.fx.o) self.assert_strict_subtype(self.fx.b, self.fx.a) self.assert_not_subtype(self.fx.a, self.fx.d) self.assert_not_subtype(self.fx.b, self.fx.c) def test_simple_generic_instance_subtyping_invariant(self) -> None: self.assert_subtype(self.fx.ga, self.fx.ga) self.assert_subtype(self.fx.hab, self.fx.hab) self.assert_not_subtype(self.fx.ga, self.fx.g2a) self.assert_not_subtype(self.fx.ga, self.fx.gb) self.assert_not_subtype(self.fx.gb, self.fx.ga) def test_simple_generic_instance_subtyping_covariant(self) -> None: self.assert_subtype(self.fx_co.ga, self.fx_co.ga) self.assert_subtype(self.fx_co.hab, self.fx_co.hab) self.assert_not_subtype(self.fx_co.ga, self.fx_co.g2a) self.assert_not_subtype(self.fx_co.ga, self.fx_co.gb) self.assert_subtype(self.fx_co.gb, self.fx_co.ga) def test_simple_generic_instance_subtyping_contravariant(self) -> None: self.assert_subtype(self.fx_contra.ga, self.fx_contra.ga) self.assert_subtype(self.fx_contra.hab, self.fx_contra.hab) self.assert_not_subtype(self.fx_contra.ga, self.fx_contra.g2a) self.assert_subtype(self.fx_contra.ga, self.fx_contra.gb) self.assert_not_subtype(self.fx_contra.gb, self.fx_contra.ga) def test_generic_subtyping_with_inheritance_invariant(self) -> None: self.assert_subtype(self.fx.gsab, self.fx.gb) self.assert_not_subtype(self.fx.gsab, self.fx.ga) self.assert_not_subtype(self.fx.gsaa, self.fx.gb) def test_generic_subtyping_with_inheritance_covariant(self) -> None: self.assert_subtype(self.fx_co.gsab, self.fx_co.gb) self.assert_subtype(self.fx_co.gsab, self.fx_co.ga) self.assert_not_subtype(self.fx_co.gsaa, self.fx_co.gb) def test_generic_subtyping_with_inheritance_contravariant(self) -> None: self.assert_subtype(self.fx_contra.gsab, self.fx_contra.gb) self.assert_not_subtype(self.fx_contra.gsab, self.fx_contra.ga) self.assert_subtype(self.fx_contra.gsaa, self.fx_contra.gb) def test_interface_subtyping(self) -> None: self.assert_subtype(self.fx.e, self.fx.f) self.assert_equivalent(self.fx.f, self.fx.f) self.assert_not_subtype(self.fx.a, self.fx.f) def test_generic_interface_subtyping(self) -> None: # TODO make this work fx2 = InterfaceTypeFixture() self.assert_subtype(fx2.m1, fx2.gfa) self.assert_not_subtype(fx2.m1, fx2.gfb) self.assert_equivalent(fx2.gfa, fx2.gfa) def test_basic_callable_subtyping(self) -> None: self.assert_strict_subtype( self.fx.callable(self.fx.o, self.fx.d), self.fx.callable(self.fx.a, self.fx.d) ) self.assert_strict_subtype( self.fx.callable(self.fx.d, self.fx.b), self.fx.callable(self.fx.d, self.fx.a) ) self.assert_strict_subtype( self.fx.callable(self.fx.a, self.fx.nonet), self.fx.callable(self.fx.a, self.fx.a) ) self.assert_unrelated( self.fx.callable(self.fx.a, self.fx.a, self.fx.a), self.fx.callable(self.fx.a, self.fx.a), ) def test_default_arg_callable_subtyping(self) -> None: self.assert_strict_subtype( 
self.fx.callable_default(1, self.fx.a, self.fx.d, self.fx.a), self.fx.callable(self.fx.a, self.fx.d, self.fx.a), ) self.assert_strict_subtype( self.fx.callable_default(1, self.fx.a, self.fx.d, self.fx.a), self.fx.callable(self.fx.a, self.fx.a), ) self.assert_strict_subtype( self.fx.callable_default(0, self.fx.a, self.fx.d, self.fx.a), self.fx.callable_default(1, self.fx.a, self.fx.d, self.fx.a), ) self.assert_unrelated( self.fx.callable_default(1, self.fx.a, self.fx.d, self.fx.a), self.fx.callable(self.fx.d, self.fx.d, self.fx.a), ) self.assert_unrelated( self.fx.callable_default(0, self.fx.a, self.fx.d, self.fx.a), self.fx.callable_default(1, self.fx.a, self.fx.a, self.fx.a), ) self.assert_unrelated( self.fx.callable_default(1, self.fx.a, self.fx.a), self.fx.callable(self.fx.a, self.fx.a, self.fx.a), ) def test_var_arg_callable_subtyping_1(self) -> None: self.assert_strict_subtype( self.fx.callable_var_arg(0, self.fx.a, self.fx.a), self.fx.callable_var_arg(0, self.fx.b, self.fx.a), ) def test_var_arg_callable_subtyping_2(self) -> None: self.assert_strict_subtype( self.fx.callable_var_arg(0, self.fx.a, self.fx.a), self.fx.callable(self.fx.b, self.fx.a), ) def test_var_arg_callable_subtyping_3(self) -> None: self.assert_strict_subtype( self.fx.callable_var_arg(0, self.fx.a, self.fx.a), self.fx.callable(self.fx.a) ) def test_var_arg_callable_subtyping_4(self) -> None: self.assert_strict_subtype( self.fx.callable_var_arg(1, self.fx.a, self.fx.d, self.fx.a), self.fx.callable(self.fx.b, self.fx.a), ) def test_var_arg_callable_subtyping_5(self) -> None: self.assert_strict_subtype( self.fx.callable_var_arg(0, self.fx.a, self.fx.d, self.fx.a), self.fx.callable(self.fx.b, self.fx.a), ) def test_var_arg_callable_subtyping_6(self) -> None: self.assert_strict_subtype( self.fx.callable_var_arg(0, self.fx.a, self.fx.f, self.fx.d), self.fx.callable_var_arg(0, self.fx.b, self.fx.e, self.fx.d), ) def test_var_arg_callable_subtyping_7(self) -> None: self.assert_not_subtype( self.fx.callable_var_arg(0, self.fx.b, self.fx.d), self.fx.callable(self.fx.a, self.fx.d), ) def test_var_arg_callable_subtyping_8(self) -> None: self.assert_not_subtype( self.fx.callable_var_arg(0, self.fx.b, self.fx.d), self.fx.callable_var_arg(0, self.fx.a, self.fx.a, self.fx.d), ) self.assert_subtype( self.fx.callable_var_arg(0, self.fx.a, self.fx.d), self.fx.callable_var_arg(0, self.fx.b, self.fx.b, self.fx.d), ) def test_var_arg_callable_subtyping_9(self) -> None: self.assert_not_subtype( self.fx.callable_var_arg(0, self.fx.b, self.fx.b, self.fx.d), self.fx.callable_var_arg(0, self.fx.a, self.fx.d), ) self.assert_subtype( self.fx.callable_var_arg(0, self.fx.a, self.fx.a, self.fx.d), self.fx.callable_var_arg(0, self.fx.b, self.fx.d), ) def test_type_callable_subtyping(self) -> None: self.assert_subtype(self.fx.callable_type(self.fx.d, self.fx.a), self.fx.type_type) self.assert_strict_subtype( self.fx.callable_type(self.fx.d, self.fx.b), self.fx.callable(self.fx.d, self.fx.a) ) self.assert_strict_subtype( self.fx.callable_type(self.fx.a, self.fx.b), self.fx.callable(self.fx.a, self.fx.b) ) def test_type_var_tuple(self) -> None: self.assert_subtype(Instance(self.fx.gvi, []), Instance(self.fx.gvi, [])) self.assert_subtype( Instance(self.fx.gvi, [self.fx.a, self.fx.b]), Instance(self.fx.gvi, [self.fx.a, self.fx.b]), ) self.assert_not_subtype( Instance(self.fx.gvi, [self.fx.a, self.fx.b]), Instance(self.fx.gvi, [self.fx.b, self.fx.a]), ) self.assert_not_subtype( Instance(self.fx.gvi, [self.fx.a, self.fx.b]), Instance(self.fx.gvi, 
[self.fx.a]) ) self.assert_subtype( Instance(self.fx.gvi, [UnpackType(self.fx.ss)]), Instance(self.fx.gvi, [UnpackType(self.fx.ss)]), ) self.assert_not_subtype( Instance(self.fx.gvi, [UnpackType(self.fx.ss)]), Instance(self.fx.gvi, [UnpackType(self.fx.us)]), ) self.assert_not_subtype( Instance(self.fx.gvi, [UnpackType(self.fx.ss)]), Instance(self.fx.gvi, []) ) self.assert_not_subtype( Instance(self.fx.gvi, [UnpackType(self.fx.ss)]), Instance(self.fx.gvi, [self.fx.anyt]) ) def test_type_var_tuple_with_prefix_suffix(self) -> None: self.assert_subtype( Instance(self.fx.gvi, [self.fx.a, UnpackType(self.fx.ss)]), Instance(self.fx.gvi, [self.fx.a, UnpackType(self.fx.ss)]), ) self.assert_subtype( Instance(self.fx.gvi, [self.fx.a, self.fx.b, UnpackType(self.fx.ss)]), Instance(self.fx.gvi, [self.fx.a, self.fx.b, UnpackType(self.fx.ss)]), ) self.assert_not_subtype( Instance(self.fx.gvi, [self.fx.a, UnpackType(self.fx.ss)]), Instance(self.fx.gvi, [self.fx.b, UnpackType(self.fx.ss)]), ) self.assert_not_subtype( Instance(self.fx.gvi, [self.fx.a, UnpackType(self.fx.ss)]), Instance(self.fx.gvi, [self.fx.a, self.fx.b, UnpackType(self.fx.ss)]), ) self.assert_subtype( Instance(self.fx.gvi, [UnpackType(self.fx.ss), self.fx.a]), Instance(self.fx.gvi, [UnpackType(self.fx.ss), self.fx.a]), ) self.assert_not_subtype( Instance(self.fx.gvi, [UnpackType(self.fx.ss), self.fx.a]), Instance(self.fx.gvi, [UnpackType(self.fx.ss), self.fx.b]), ) self.assert_not_subtype( Instance(self.fx.gvi, [UnpackType(self.fx.ss), self.fx.a]), Instance(self.fx.gvi, [UnpackType(self.fx.ss), self.fx.a, self.fx.b]), ) self.assert_subtype( Instance(self.fx.gvi, [self.fx.a, self.fx.b, UnpackType(self.fx.ss), self.fx.c]), Instance(self.fx.gvi, [self.fx.a, self.fx.b, UnpackType(self.fx.ss), self.fx.c]), ) self.assert_not_subtype( Instance(self.fx.gvi, [self.fx.a, self.fx.b, UnpackType(self.fx.ss), self.fx.c]), Instance(self.fx.gvi, [self.fx.a, UnpackType(self.fx.ss), self.fx.b, self.fx.c]), ) def test_type_var_tuple_unpacked_variable_length_tuple(self) -> None: self.assert_subtype( Instance(self.fx.gvi, [self.fx.a, self.fx.a]), Instance(self.fx.gvi, [UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))]), ) # IDEA: Maybe add these test cases (they are tested pretty well in type # checker tests already): # * more interface subtyping test cases # * more generic interface subtyping test cases # * type variables # * tuple types # * None type # * any type # * generic function types def assert_subtype(self, s: Type, t: Type) -> None: assert is_subtype(s, t), f"{s} not subtype of {t}" def assert_not_subtype(self, s: Type, t: Type) -> None: assert not is_subtype(s, t), f"{s} subtype of {t}" def assert_strict_subtype(self, s: Type, t: Type) -> None: self.assert_subtype(s, t) self.assert_not_subtype(t, s) def assert_equivalent(self, s: Type, t: Type) -> None: self.assert_subtype(s, t) self.assert_subtype(t, s) def assert_unrelated(self, s: Type, t: Type) -> None: self.assert_not_subtype(s, t) self.assert_not_subtype(t, s)
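

# --- Illustrative sketch (not part of the vendored mypy source) --------------
# The assert_* helpers above are thin wrappers over mypy.subtypes.is_subtype
# queries against the shared TypeFixture. Spelled out directly, the covariant
# generic case from test_simple_generic_instance_subtyping_covariant reads:
def _demo_covariant_subtyping() -> None:
    fx_co = TypeFixture(COVARIANT)
    # B <: A, so G[B] <: G[A] when G's type parameter is covariant...
    assert is_subtype(fx_co.gb, fx_co.ga)
    # ...but not the other way around.
    assert not is_subtype(fx_co.ga, fx_co.gb)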
algorandfoundation/puya
src/puyapy/_vendor/mypy/test/testsubtypes.py
Python
NOASSERTION
12,248
"""Identity AST transform test cases""" from __future__ import annotations from mypy import build from mypy.errors import CompileError from mypy.modulefinder import BuildSource from mypy.test.config import test_temp_dir from mypy.test.data import DataDrivenTestCase, DataSuite from mypy.test.helpers import assert_string_arrays_equal, normalize_error_messages, parse_options from mypy.test.visitors import TypeAssertTransformVisitor class TransformSuite(DataSuite): required_out_section = True # Reuse semantic analysis test cases. files = [ "semanal-basic.test", "semanal-expressions.test", "semanal-classes.test", "semanal-types.test", "semanal-modules.test", "semanal-statements.test", "semanal-abstractclasses.test", ] native_sep = True def run_case(self, testcase: DataDrivenTestCase) -> None: test_transform(testcase) def test_transform(testcase: DataDrivenTestCase) -> None: """Perform an identity transform test case.""" try: src = "\n".join(testcase.input) options = parse_options(src, testcase, 1) options.use_builtins_fixtures = True options.semantic_analysis_only = True options.show_traceback = True options.force_uppercase_builtins = True result = build.build( sources=[BuildSource("main", None, src)], options=options, alt_lib_path=test_temp_dir ) a = result.errors if a: raise CompileError(a) # Include string representations of the source files in the actual # output. for module in sorted(result.files.keys()): if module in testcase.test_modules: t = TypeAssertTransformVisitor() t.test_only = True file = t.mypyfile(result.files[module]) a += file.str_with_options(options).split("\n") except CompileError as e: a = e.messages if testcase.normalize_output: a = normalize_error_messages(a) assert_string_arrays_equal( testcase.output, a, f"Invalid semantic analyzer output ({testcase.file}, line {testcase.line})", )
algorandfoundation/puya
src/puyapy/_vendor/mypy/test/testtransform.py
Python
NOASSERTION
2,199
"""Test cases for the type checker: exporting inferred types""" from __future__ import annotations import re from mypy import build from mypy.errors import CompileError from mypy.modulefinder import BuildSource from mypy.nodes import NameExpr, TempNode from mypy.options import Options from mypy.test.config import test_temp_dir from mypy.test.data import DataDrivenTestCase, DataSuite from mypy.test.helpers import assert_string_arrays_equal from mypy.test.visitors import SkippedNodeSearcher, ignore_node from mypy.util import short_type class TypeExportSuite(DataSuite): required_out_section = True files = ["typexport-basic.test"] def run_case(self, testcase: DataDrivenTestCase) -> None: try: line = testcase.input[0] mask = "" if line.startswith("##"): mask = "(" + line[2:].strip() + ")$" src = "\n".join(testcase.input) options = Options() options.strict_optional = False # TODO: Enable strict optional checking options.use_builtins_fixtures = True options.show_traceback = True options.export_types = True options.preserve_asts = True options.allow_empty_bodies = True options.force_uppercase_builtins = True result = build.build( sources=[BuildSource("main", None, src)], options=options, alt_lib_path=test_temp_dir, ) a = result.errors map = result.types nodes = map.keys() # Ignore NameExpr nodes of variables with explicit (trivial) types # to simplify output. searcher = SkippedNodeSearcher() for file in result.files.values(): searcher.ignore_file = file.fullname not in testcase.test_modules file.accept(searcher) ignored = searcher.nodes # Filter nodes that should be included in the output. keys = [] for node in nodes: if isinstance(node, TempNode): continue if node.line != -1 and map[node]: if ignore_node(node) or node in ignored: continue if re.match(mask, short_type(node)) or ( isinstance(node, NameExpr) and re.match(mask, node.name) ): # Include node in output. keys.append(node) for key in sorted( keys, key=lambda n: (n.line, short_type(n), str(n) + map[n].str_with_options(options)), ): ts = map[key].str_with_options(options).replace("*", "") # Remove erased tags ts = ts.replace("__main__.", "") a.append(f"{short_type(key)}({key.line}) : {ts}") except CompileError as e: a = e.messages assert_string_arrays_equal( testcase.output, a, f"Invalid type checker output ({testcase.file}, line {testcase.line})", )
algorandfoundation/puya
src/puyapy/_vendor/mypy/test/testtypegen.py
Python
NOASSERTION
3,153
"""Test cases for mypy types and type operations.""" from __future__ import annotations import re from unittest import TestCase, skipUnless from mypy.erasetype import erase_type, remove_instance_last_known_values from mypy.indirection import TypeIndirectionVisitor from mypy.join import join_simple, join_types from mypy.meet import meet_types, narrow_declared_type from mypy.nodes import ( ARG_NAMED, ARG_OPT, ARG_POS, ARG_STAR, ARG_STAR2, CONTRAVARIANT, COVARIANT, INVARIANT, ArgKind, CallExpr, Expression, NameExpr, ) from mypy.options import Options from mypy.plugins.common import find_shallow_matching_overload_item from mypy.state import state from mypy.subtypes import is_more_precise, is_proper_subtype, is_same_type, is_subtype from mypy.test.helpers import Suite, assert_equal, assert_type, skip from mypy.test.typefixture import InterfaceTypeFixture, TypeFixture from mypy.typeops import false_only, make_simplified_union, true_only from mypy.types import ( AnyType, CallableType, Instance, LiteralType, NoneType, Overloaded, ProperType, TupleType, Type, TypeOfAny, TypeType, TypeVarId, TypeVarType, UnboundType, UninhabitedType, UnionType, UnpackType, get_proper_type, has_recursive_types, ) # Solving the import cycle: import mypy.expandtype # ruff: isort: skip class TypesSuite(Suite): def setUp(self) -> None: self.x = UnboundType("X") # Helpers self.y = UnboundType("Y") self.fx = TypeFixture() self.function = self.fx.function def test_any(self) -> None: assert_equal(str(AnyType(TypeOfAny.special_form)), "Any") def test_simple_unbound_type(self) -> None: u = UnboundType("Foo") assert_equal(str(u), "Foo?") def test_generic_unbound_type(self) -> None: u = UnboundType("Foo", [UnboundType("T"), AnyType(TypeOfAny.special_form)]) assert_equal(str(u), "Foo?[T?, Any]") def test_callable_type(self) -> None: c = CallableType( [self.x, self.y], [ARG_POS, ARG_POS], [None, None], AnyType(TypeOfAny.special_form), self.function, ) assert_equal(str(c), "def (X?, Y?) -> Any") c2 = CallableType([], [], [], NoneType(), self.fx.function) assert_equal(str(c2), "def ()") def test_callable_type_with_default_args(self) -> None: c = CallableType( [self.x, self.y], [ARG_POS, ARG_OPT], [None, None], AnyType(TypeOfAny.special_form), self.function, ) assert_equal(str(c), "def (X?, Y? =) -> Any") c2 = CallableType( [self.x, self.y], [ARG_OPT, ARG_OPT], [None, None], AnyType(TypeOfAny.special_form), self.function, ) assert_equal(str(c2), "def (X? =, Y? =) -> Any") def test_callable_type_with_var_args(self) -> None: c = CallableType( [self.x], [ARG_STAR], [None], AnyType(TypeOfAny.special_form), self.function ) assert_equal(str(c), "def (*X?) -> Any") c2 = CallableType( [self.x, self.y], [ARG_POS, ARG_STAR], [None, None], AnyType(TypeOfAny.special_form), self.function, ) assert_equal(str(c2), "def (X?, *Y?) -> Any") c3 = CallableType( [self.x, self.y], [ARG_OPT, ARG_STAR], [None, None], AnyType(TypeOfAny.special_form), self.function, ) assert_equal(str(c3), "def (X? =, *Y?) 
-> Any") def test_tuple_type_upper(self) -> None: options = Options() options.force_uppercase_builtins = True assert_equal(TupleType([], self.fx.std_tuple).str_with_options(options), "Tuple[()]") assert_equal(TupleType([self.x], self.fx.std_tuple).str_with_options(options), "Tuple[X?]") assert_equal( TupleType( [self.x, AnyType(TypeOfAny.special_form)], self.fx.std_tuple ).str_with_options(options), "Tuple[X?, Any]", ) def test_type_variable_binding(self) -> None: assert_equal( str( TypeVarType( "X", "X", TypeVarId(1), [], self.fx.o, AnyType(TypeOfAny.from_omitted_generics) ) ), "X`1", ) assert_equal( str( TypeVarType( "X", "X", TypeVarId(1), [self.x, self.y], self.fx.o, AnyType(TypeOfAny.from_omitted_generics), ) ), "X`1", ) def test_generic_function_type(self) -> None: c = CallableType( [self.x, self.y], [ARG_POS, ARG_POS], [None, None], self.y, self.function, name=None, variables=[ TypeVarType( "X", "X", TypeVarId(-1), [], self.fx.o, AnyType(TypeOfAny.from_omitted_generics), ) ], ) assert_equal(str(c), "def [X] (X?, Y?) -> Y?") v = [ TypeVarType( "Y", "Y", TypeVarId(-1), [], self.fx.o, AnyType(TypeOfAny.from_omitted_generics) ), TypeVarType( "X", "X", TypeVarId(-2), [], self.fx.o, AnyType(TypeOfAny.from_omitted_generics) ), ] c2 = CallableType([], [], [], NoneType(), self.function, name=None, variables=v) assert_equal(str(c2), "def [Y, X] ()") def test_type_alias_expand_once(self) -> None: A, target = self.fx.def_alias_1(self.fx.a) assert get_proper_type(A) == target assert get_proper_type(target) == target A, target = self.fx.def_alias_2(self.fx.a) assert get_proper_type(A) == target assert get_proper_type(target) == target def test_type_alias_expand_all(self) -> None: A, _ = self.fx.def_alias_1(self.fx.a) assert A.expand_all_if_possible() is None A, _ = self.fx.def_alias_2(self.fx.a) assert A.expand_all_if_possible() is None B = self.fx.non_rec_alias(self.fx.a) C = self.fx.non_rec_alias(TupleType([B, B], Instance(self.fx.std_tuplei, [B]))) assert C.expand_all_if_possible() == TupleType( [self.fx.a, self.fx.a], Instance(self.fx.std_tuplei, [self.fx.a]) ) def test_recursive_nested_in_non_recursive(self) -> None: A, _ = self.fx.def_alias_1(self.fx.a) T = TypeVarType( "T", "T", TypeVarId(-1), [], self.fx.o, AnyType(TypeOfAny.from_omitted_generics) ) NA = self.fx.non_rec_alias(Instance(self.fx.gi, [T]), [T], [A]) assert not NA.is_recursive assert has_recursive_types(NA) def test_indirection_no_infinite_recursion(self) -> None: A, _ = self.fx.def_alias_1(self.fx.a) visitor = TypeIndirectionVisitor() modules = A.accept(visitor) assert modules == {"__main__", "builtins"} A, _ = self.fx.def_alias_2(self.fx.a) visitor = TypeIndirectionVisitor() modules = A.accept(visitor) assert modules == {"__main__", "builtins"} class TypeOpsSuite(Suite): def setUp(self) -> None: self.fx = TypeFixture(INVARIANT) self.fx_co = TypeFixture(COVARIANT) self.fx_contra = TypeFixture(CONTRAVARIANT) # expand_type def test_trivial_expand(self) -> None: for t in ( self.fx.a, self.fx.o, self.fx.t, self.fx.nonet, self.tuple(self.fx.a), self.callable([], self.fx.a, self.fx.a), self.fx.anyt, ): self.assert_expand(t, [], t) self.assert_expand(t, [], t) self.assert_expand(t, [], t) def test_trivial_expand_recursive(self) -> None: A, _ = self.fx.def_alias_1(self.fx.a) self.assert_expand(A, [], A) A, _ = self.fx.def_alias_2(self.fx.a) self.assert_expand(A, [], A) def test_expand_naked_type_var(self) -> None: self.assert_expand(self.fx.t, [(self.fx.t.id, self.fx.a)], self.fx.a) self.assert_expand(self.fx.t, [(self.fx.s.id, 
self.fx.a)], self.fx.t) def test_expand_basic_generic_types(self) -> None: self.assert_expand(self.fx.gt, [(self.fx.t.id, self.fx.a)], self.fx.ga) # IDEA: Add test cases for # tuple types # callable types # multiple arguments def assert_expand( self, orig: Type, map_items: list[tuple[TypeVarId, Type]], result: Type ) -> None: lower_bounds = {} for id, t in map_items: lower_bounds[id] = t exp = mypy.expandtype.expand_type(orig, lower_bounds) # Remove erased tags (asterisks). assert_equal(str(exp).replace("*", ""), str(result)) # erase_type def test_trivial_erase(self) -> None: for t in (self.fx.a, self.fx.o, self.fx.nonet, self.fx.anyt): self.assert_erase(t, t) def test_erase_with_type_variable(self) -> None: self.assert_erase(self.fx.t, self.fx.anyt) def test_erase_with_generic_type(self) -> None: self.assert_erase(self.fx.ga, self.fx.gdyn) self.assert_erase(self.fx.hab, Instance(self.fx.hi, [self.fx.anyt, self.fx.anyt])) def test_erase_with_generic_type_recursive(self) -> None: tuple_any = Instance(self.fx.std_tuplei, [AnyType(TypeOfAny.explicit)]) A, _ = self.fx.def_alias_1(self.fx.a) self.assert_erase(A, tuple_any) A, _ = self.fx.def_alias_2(self.fx.a) self.assert_erase(A, UnionType([self.fx.a, tuple_any])) def test_erase_with_tuple_type(self) -> None: self.assert_erase(self.tuple(self.fx.a), self.fx.std_tuple) def test_erase_with_function_type(self) -> None: self.assert_erase( self.fx.callable(self.fx.a, self.fx.b), CallableType( arg_types=[self.fx.anyt, self.fx.anyt], arg_kinds=[ARG_STAR, ARG_STAR2], arg_names=[None, None], ret_type=self.fx.anyt, fallback=self.fx.function, ), ) def test_erase_with_type_object(self) -> None: self.assert_erase( self.fx.callable_type(self.fx.a, self.fx.b), CallableType( arg_types=[self.fx.anyt, self.fx.anyt], arg_kinds=[ARG_STAR, ARG_STAR2], arg_names=[None, None], ret_type=self.fx.anyt, fallback=self.fx.type_type, ), ) def test_erase_with_type_type(self) -> None: self.assert_erase(self.fx.type_a, self.fx.type_a) self.assert_erase(self.fx.type_t, self.fx.type_any) def assert_erase(self, orig: Type, result: Type) -> None: assert_equal(str(erase_type(orig)), str(result)) # is_more_precise def test_is_more_precise(self) -> None: fx = self.fx assert is_more_precise(fx.b, fx.a) assert is_more_precise(fx.b, fx.b) assert is_more_precise(fx.b, fx.b) assert is_more_precise(fx.b, fx.anyt) assert is_more_precise(self.tuple(fx.b, fx.a), self.tuple(fx.b, fx.a)) assert is_more_precise(self.tuple(fx.b, fx.b), self.tuple(fx.b, fx.a)) assert not is_more_precise(fx.a, fx.b) assert not is_more_precise(fx.anyt, fx.b) # is_proper_subtype def test_is_proper_subtype(self) -> None: fx = self.fx assert is_proper_subtype(fx.a, fx.a) assert is_proper_subtype(fx.b, fx.a) assert is_proper_subtype(fx.b, fx.o) assert is_proper_subtype(fx.b, fx.o) assert not is_proper_subtype(fx.a, fx.b) assert not is_proper_subtype(fx.o, fx.b) assert is_proper_subtype(fx.anyt, fx.anyt) assert not is_proper_subtype(fx.a, fx.anyt) assert not is_proper_subtype(fx.anyt, fx.a) assert is_proper_subtype(fx.ga, fx.ga) assert is_proper_subtype(fx.gdyn, fx.gdyn) assert not is_proper_subtype(fx.ga, fx.gdyn) assert not is_proper_subtype(fx.gdyn, fx.ga) assert is_proper_subtype(fx.t, fx.t) assert not is_proper_subtype(fx.t, fx.s) assert is_proper_subtype(fx.a, UnionType([fx.a, fx.b])) assert is_proper_subtype(UnionType([fx.a, fx.b]), UnionType([fx.a, fx.b, fx.c])) assert not is_proper_subtype(UnionType([fx.a, fx.b]), UnionType([fx.b, fx.c])) def test_is_proper_subtype_covariance(self) -> None: fx_co = self.fx_co 
assert is_proper_subtype(fx_co.gsab, fx_co.gb) assert is_proper_subtype(fx_co.gsab, fx_co.ga) assert not is_proper_subtype(fx_co.gsaa, fx_co.gb) assert is_proper_subtype(fx_co.gb, fx_co.ga) assert not is_proper_subtype(fx_co.ga, fx_co.gb) def test_is_proper_subtype_contravariance(self) -> None: fx_contra = self.fx_contra assert is_proper_subtype(fx_contra.gsab, fx_contra.gb) assert not is_proper_subtype(fx_contra.gsab, fx_contra.ga) assert is_proper_subtype(fx_contra.gsaa, fx_contra.gb) assert not is_proper_subtype(fx_contra.gb, fx_contra.ga) assert is_proper_subtype(fx_contra.ga, fx_contra.gb) def test_is_proper_subtype_invariance(self) -> None: fx = self.fx assert is_proper_subtype(fx.gsab, fx.gb) assert not is_proper_subtype(fx.gsab, fx.ga) assert not is_proper_subtype(fx.gsaa, fx.gb) assert not is_proper_subtype(fx.gb, fx.ga) assert not is_proper_subtype(fx.ga, fx.gb) def test_is_proper_subtype_and_subtype_literal_types(self) -> None: fx = self.fx lit1 = fx.lit1 lit2 = fx.lit2 lit3 = fx.lit3 assert is_proper_subtype(lit1, fx.a) assert not is_proper_subtype(lit1, fx.d) assert not is_proper_subtype(fx.a, lit1) assert is_proper_subtype(fx.uninhabited, lit1) assert not is_proper_subtype(lit1, fx.uninhabited) assert is_proper_subtype(lit1, lit1) assert not is_proper_subtype(lit1, lit2) assert not is_proper_subtype(lit2, lit3) assert is_subtype(lit1, fx.a) assert not is_subtype(lit1, fx.d) assert not is_subtype(fx.a, lit1) assert is_subtype(fx.uninhabited, lit1) assert not is_subtype(lit1, fx.uninhabited) assert is_subtype(lit1, lit1) assert not is_subtype(lit1, lit2) assert not is_subtype(lit2, lit3) assert not is_proper_subtype(lit1, fx.anyt) assert not is_proper_subtype(fx.anyt, lit1) assert is_subtype(lit1, fx.anyt) assert is_subtype(fx.anyt, lit1) def test_subtype_aliases(self) -> None: A1, _ = self.fx.def_alias_1(self.fx.a) AA1, _ = self.fx.def_alias_1(self.fx.a) assert is_subtype(A1, AA1) assert is_subtype(AA1, A1) A2, _ = self.fx.def_alias_2(self.fx.a) AA2, _ = self.fx.def_alias_2(self.fx.a) assert is_subtype(A2, AA2) assert is_subtype(AA2, A2) B1, _ = self.fx.def_alias_1(self.fx.b) B2, _ = self.fx.def_alias_2(self.fx.b) assert is_subtype(B1, A1) assert is_subtype(B2, A2) assert not is_subtype(A1, B1) assert not is_subtype(A2, B2) assert not is_subtype(A2, A1) assert is_subtype(A1, A2) # can_be_true / can_be_false def test_empty_tuple_always_false(self) -> None: tuple_type = self.tuple() assert tuple_type.can_be_false assert not tuple_type.can_be_true def test_nonempty_tuple_always_true(self) -> None: tuple_type = self.tuple(AnyType(TypeOfAny.special_form), AnyType(TypeOfAny.special_form)) assert tuple_type.can_be_true assert not tuple_type.can_be_false def test_union_can_be_true_if_any_true(self) -> None: union_type = UnionType([self.fx.a, self.tuple()]) assert union_type.can_be_true def test_union_can_not_be_true_if_none_true(self) -> None: union_type = UnionType([self.tuple(), self.tuple()]) assert not union_type.can_be_true def test_union_can_be_false_if_any_false(self) -> None: union_type = UnionType([self.fx.a, self.tuple()]) assert union_type.can_be_false def test_union_can_not_be_false_if_none_false(self) -> None: union_type = UnionType([self.tuple(self.fx.a), self.tuple(self.fx.d)]) assert not union_type.can_be_false # true_only / false_only def test_true_only_of_false_type_is_uninhabited(self) -> None: to = true_only(NoneType()) assert_type(UninhabitedType, to) def test_true_only_of_true_type_is_idempotent(self) -> None: always_true = 
self.tuple(AnyType(TypeOfAny.special_form)) to = true_only(always_true) assert always_true is to def test_true_only_of_instance(self) -> None: to = true_only(self.fx.a) assert_equal(str(to), "A") assert to.can_be_true assert not to.can_be_false assert_type(Instance, to) # The original class still can be false assert self.fx.a.can_be_false def test_true_only_of_union(self) -> None: tup_type = self.tuple(AnyType(TypeOfAny.special_form)) # Union of something that is unknown, something that is always true, something # that is always false union_type = UnionType([self.fx.a, tup_type, self.tuple()]) to = true_only(union_type) assert isinstance(to, UnionType) assert_equal(len(to.items), 2) assert to.items[0].can_be_true assert not to.items[0].can_be_false assert to.items[1] is tup_type def test_false_only_of_true_type_is_uninhabited(self) -> None: with state.strict_optional_set(True): fo = false_only(self.tuple(AnyType(TypeOfAny.special_form))) assert_type(UninhabitedType, fo) def test_false_only_tuple(self) -> None: with state.strict_optional_set(False): fo = false_only(self.tuple(self.fx.a)) assert_equal(fo, NoneType()) with state.strict_optional_set(True): fo = false_only(self.tuple(self.fx.a)) assert_equal(fo, UninhabitedType()) def test_false_only_of_false_type_is_idempotent(self) -> None: always_false = NoneType() fo = false_only(always_false) assert always_false is fo def test_false_only_of_instance(self) -> None: fo = false_only(self.fx.a) assert_equal(str(fo), "A") assert not fo.can_be_true assert fo.can_be_false assert_type(Instance, fo) # The original class still can be true assert self.fx.a.can_be_true def test_false_only_of_union(self) -> None: with state.strict_optional_set(True): tup_type = self.tuple() # Union of something that is unknown, something that is always true, something # that is always false union_type = UnionType( [self.fx.a, self.tuple(AnyType(TypeOfAny.special_form)), tup_type] ) assert_equal(len(union_type.items), 3) fo = false_only(union_type) assert isinstance(fo, UnionType) assert_equal(len(fo.items), 2) assert not fo.items[0].can_be_true assert fo.items[0].can_be_false assert fo.items[1] is tup_type def test_simplified_union(self) -> None: fx = self.fx self.assert_simplified_union([fx.a, fx.a], fx.a) self.assert_simplified_union([fx.a, fx.b], fx.a) self.assert_simplified_union([fx.a, fx.d], UnionType([fx.a, fx.d])) self.assert_simplified_union([fx.a, fx.uninhabited], fx.a) self.assert_simplified_union([fx.ga, fx.gs2a], fx.ga) self.assert_simplified_union([fx.ga, fx.gsab], UnionType([fx.ga, fx.gsab])) self.assert_simplified_union([fx.ga, fx.gsba], fx.ga) self.assert_simplified_union([fx.a, UnionType([fx.d])], UnionType([fx.a, fx.d])) self.assert_simplified_union([fx.a, UnionType([fx.a])], fx.a) self.assert_simplified_union( [fx.b, UnionType([fx.c, UnionType([fx.d])])], UnionType([fx.b, fx.c, fx.d]) ) def test_simplified_union_with_literals(self) -> None: fx = self.fx self.assert_simplified_union([fx.lit1, fx.a], fx.a) self.assert_simplified_union([fx.lit1, fx.lit2, fx.a], fx.a) self.assert_simplified_union([fx.lit1, fx.lit1], fx.lit1) self.assert_simplified_union([fx.lit1, fx.lit2], UnionType([fx.lit1, fx.lit2])) self.assert_simplified_union([fx.lit1, fx.lit3], UnionType([fx.lit1, fx.lit3])) self.assert_simplified_union([fx.lit1, fx.uninhabited], fx.lit1) self.assert_simplified_union([fx.lit1_inst, fx.a], fx.a) self.assert_simplified_union([fx.lit1_inst, fx.lit1_inst], fx.lit1_inst) self.assert_simplified_union( [fx.lit1_inst, fx.lit2_inst], 
UnionType([fx.lit1_inst, fx.lit2_inst]) ) self.assert_simplified_union( [fx.lit1_inst, fx.lit3_inst], UnionType([fx.lit1_inst, fx.lit3_inst]) ) self.assert_simplified_union([fx.lit1_inst, fx.uninhabited], fx.lit1_inst) self.assert_simplified_union([fx.lit1, fx.lit1_inst], fx.lit1) self.assert_simplified_union([fx.lit1, fx.lit2_inst], UnionType([fx.lit1, fx.lit2_inst])) self.assert_simplified_union([fx.lit1, fx.lit3_inst], UnionType([fx.lit1, fx.lit3_inst])) def test_simplified_union_with_str_literals(self) -> None: fx = self.fx self.assert_simplified_union([fx.lit_str1, fx.lit_str2, fx.str_type], fx.str_type) self.assert_simplified_union([fx.lit_str1, fx.lit_str1, fx.lit_str1], fx.lit_str1) self.assert_simplified_union( [fx.lit_str1, fx.lit_str2, fx.lit_str3], UnionType([fx.lit_str1, fx.lit_str2, fx.lit_str3]), ) self.assert_simplified_union( [fx.lit_str1, fx.lit_str2, fx.uninhabited], UnionType([fx.lit_str1, fx.lit_str2]) ) def test_simplify_very_large_union(self) -> None: fx = self.fx literals = [] for i in range(5000): literals.append(LiteralType("v%d" % i, fx.str_type)) # This shouldn't be very slow, even if the union is big. self.assert_simplified_union([*literals, fx.str_type], fx.str_type) def test_simplified_union_with_str_instance_literals(self) -> None: fx = self.fx self.assert_simplified_union( [fx.lit_str1_inst, fx.lit_str2_inst, fx.str_type], fx.str_type ) self.assert_simplified_union( [fx.lit_str1_inst, fx.lit_str1_inst, fx.lit_str1_inst], fx.lit_str1_inst ) self.assert_simplified_union( [fx.lit_str1_inst, fx.lit_str2_inst, fx.lit_str3_inst], UnionType([fx.lit_str1_inst, fx.lit_str2_inst, fx.lit_str3_inst]), ) self.assert_simplified_union( [fx.lit_str1_inst, fx.lit_str2_inst, fx.uninhabited], UnionType([fx.lit_str1_inst, fx.lit_str2_inst]), ) def test_simplified_union_with_mixed_str_literals(self) -> None: fx = self.fx self.assert_simplified_union( [fx.lit_str1, fx.lit_str2, fx.lit_str3_inst], UnionType([fx.lit_str1, fx.lit_str2, fx.lit_str3_inst]), ) self.assert_simplified_union([fx.lit_str1, fx.lit_str1, fx.lit_str1_inst], fx.lit_str1) def assert_simplified_union(self, original: list[Type], union: Type) -> None: assert_equal(make_simplified_union(original), union) assert_equal(make_simplified_union(list(reversed(original))), union) # Helpers def tuple(self, *a: Type) -> TupleType: return TupleType(list(a), self.fx.std_tuple) def callable(self, vars: list[str], *a: Type) -> CallableType: """callable(args, a1, ..., an, r) constructs a callable with argument types a1, ... an and return type r and type arguments vars. 
""" tv: list[TypeVarType] = [] n = -1 for v in vars: tv.append( TypeVarType( v, v, TypeVarId(n), [], self.fx.o, AnyType(TypeOfAny.from_omitted_generics) ) ) n -= 1 return CallableType( list(a[:-1]), [ARG_POS] * (len(a) - 1), [None] * (len(a) - 1), a[-1], self.fx.function, name=None, variables=tv, ) class JoinSuite(Suite): def setUp(self) -> None: self.fx = TypeFixture(INVARIANT) self.fx_co = TypeFixture(COVARIANT) self.fx_contra = TypeFixture(CONTRAVARIANT) def test_trivial_cases(self) -> None: for simple in self.fx.a, self.fx.o, self.fx.b: self.assert_join(simple, simple, simple) def test_class_subtyping(self) -> None: self.assert_join(self.fx.a, self.fx.o, self.fx.o) self.assert_join(self.fx.b, self.fx.o, self.fx.o) self.assert_join(self.fx.a, self.fx.d, self.fx.o) self.assert_join(self.fx.b, self.fx.c, self.fx.a) self.assert_join(self.fx.b, self.fx.d, self.fx.o) def test_tuples(self) -> None: self.assert_join(self.tuple(), self.tuple(), self.tuple()) self.assert_join(self.tuple(self.fx.a), self.tuple(self.fx.a), self.tuple(self.fx.a)) self.assert_join( self.tuple(self.fx.b, self.fx.c), self.tuple(self.fx.a, self.fx.d), self.tuple(self.fx.a, self.fx.o), ) self.assert_join( self.tuple(self.fx.a, self.fx.a), self.fx.std_tuple, self.var_tuple(self.fx.anyt) ) self.assert_join( self.tuple(self.fx.a), self.tuple(self.fx.a, self.fx.a), self.var_tuple(self.fx.a) ) self.assert_join( self.tuple(self.fx.b), self.tuple(self.fx.a, self.fx.c), self.var_tuple(self.fx.a) ) self.assert_join(self.tuple(), self.tuple(self.fx.a), self.var_tuple(self.fx.a)) def test_var_tuples(self) -> None: self.assert_join( self.tuple(self.fx.a), self.var_tuple(self.fx.a), self.var_tuple(self.fx.a) ) self.assert_join( self.var_tuple(self.fx.a), self.tuple(self.fx.a), self.var_tuple(self.fx.a) ) self.assert_join(self.var_tuple(self.fx.a), self.tuple(), self.var_tuple(self.fx.a)) def test_function_types(self) -> None: self.assert_join( self.callable(self.fx.a, self.fx.b), self.callable(self.fx.a, self.fx.b), self.callable(self.fx.a, self.fx.b), ) self.assert_join( self.callable(self.fx.a, self.fx.b), self.callable(self.fx.b, self.fx.b), self.callable(self.fx.b, self.fx.b), ) self.assert_join( self.callable(self.fx.a, self.fx.b), self.callable(self.fx.a, self.fx.a), self.callable(self.fx.a, self.fx.a), ) self.assert_join(self.callable(self.fx.a, self.fx.b), self.fx.function, self.fx.function) self.assert_join( self.callable(self.fx.a, self.fx.b), self.callable(self.fx.d, self.fx.b), self.fx.function, ) def test_type_vars(self) -> None: self.assert_join(self.fx.t, self.fx.t, self.fx.t) self.assert_join(self.fx.s, self.fx.s, self.fx.s) self.assert_join(self.fx.t, self.fx.s, self.fx.o) def test_none(self) -> None: # Any type t joined with None results in t. for t in [ NoneType(), self.fx.a, self.fx.o, UnboundType("x"), self.fx.t, self.tuple(), self.callable(self.fx.a, self.fx.b), self.fx.anyt, ]: self.assert_join(t, NoneType(), t) def test_unbound_type(self) -> None: self.assert_join(UnboundType("x"), UnboundType("x"), self.fx.anyt) self.assert_join(UnboundType("x"), UnboundType("y"), self.fx.anyt) # Any type t joined with an unbound type results in dynamic. Unbound # type means that there is an error somewhere in the program, so this # does not affect type safety (whatever the result). for t in [ self.fx.a, self.fx.o, self.fx.ga, self.fx.t, self.tuple(), self.callable(self.fx.a, self.fx.b), ]: self.assert_join(t, UnboundType("X"), self.fx.anyt) def test_any_type(self) -> None: # Join against 'Any' type always results in 'Any'. 
for t in [ self.fx.anyt, self.fx.a, self.fx.o, NoneType(), UnboundType("x"), self.fx.t, self.tuple(), self.callable(self.fx.a, self.fx.b), ]: self.assert_join(t, self.fx.anyt, self.fx.anyt) def test_mixed_truth_restricted_type_simple(self) -> None: # join_simple against differently restricted truthiness types drops restrictions. true_a = true_only(self.fx.a) false_o = false_only(self.fx.o) j = join_simple(self.fx.o, true_a, false_o) assert j.can_be_true assert j.can_be_false def test_mixed_truth_restricted_type(self) -> None: # join_types against differently restricted truthiness types drops restrictions. true_any = true_only(AnyType(TypeOfAny.special_form)) false_o = false_only(self.fx.o) j = join_types(true_any, false_o) assert j.can_be_true assert j.can_be_false def test_other_mixed_types(self) -> None: # In general, joining unrelated types produces object. for t1 in [self.fx.a, self.fx.t, self.tuple(), self.callable(self.fx.a, self.fx.b)]: for t2 in [self.fx.a, self.fx.t, self.tuple(), self.callable(self.fx.a, self.fx.b)]: if str(t1) != str(t2): self.assert_join(t1, t2, self.fx.o) def test_simple_generics(self) -> None: self.assert_join(self.fx.ga, self.fx.nonet, self.fx.ga) self.assert_join(self.fx.ga, self.fx.anyt, self.fx.anyt) for t in [ self.fx.a, self.fx.o, self.fx.t, self.tuple(), self.callable(self.fx.a, self.fx.b), ]: self.assert_join(t, self.fx.ga, self.fx.o) def test_generics_invariant(self) -> None: self.assert_join(self.fx.ga, self.fx.ga, self.fx.ga) self.assert_join(self.fx.ga, self.fx.gb, self.fx.o) self.assert_join(self.fx.ga, self.fx.gd, self.fx.o) self.assert_join(self.fx.ga, self.fx.g2a, self.fx.o) def test_generics_covariant(self) -> None: self.assert_join(self.fx_co.ga, self.fx_co.ga, self.fx_co.ga) self.assert_join(self.fx_co.ga, self.fx_co.gb, self.fx_co.ga) self.assert_join(self.fx_co.ga, self.fx_co.gd, self.fx_co.go) self.assert_join(self.fx_co.ga, self.fx_co.g2a, self.fx_co.o) def test_generics_contravariant(self) -> None: self.assert_join(self.fx_contra.ga, self.fx_contra.ga, self.fx_contra.ga) # TODO: this can be more precise than "object", see a comment in mypy/join.py self.assert_join(self.fx_contra.ga, self.fx_contra.gb, self.fx_contra.o) self.assert_join(self.fx_contra.ga, self.fx_contra.g2a, self.fx_contra.o) def test_generics_with_multiple_args(self) -> None: self.assert_join(self.fx_co.hab, self.fx_co.hab, self.fx_co.hab) self.assert_join(self.fx_co.hab, self.fx_co.hbb, self.fx_co.hab) self.assert_join(self.fx_co.had, self.fx_co.haa, self.fx_co.hao) def test_generics_with_inheritance(self) -> None: self.assert_join(self.fx_co.gsab, self.fx_co.gb, self.fx_co.gb) self.assert_join(self.fx_co.gsba, self.fx_co.gb, self.fx_co.ga) self.assert_join(self.fx_co.gsab, self.fx_co.gd, self.fx_co.go) def test_generics_with_inheritance_and_shared_supertype(self) -> None: self.assert_join(self.fx_co.gsba, self.fx_co.gs2a, self.fx_co.ga) self.assert_join(self.fx_co.gsab, self.fx_co.gs2a, self.fx_co.ga) self.assert_join(self.fx_co.gsab, self.fx_co.gs2d, self.fx_co.go) def test_generic_types_and_any(self) -> None: self.assert_join(self.fx.gdyn, self.fx.ga, self.fx.gdyn) self.assert_join(self.fx_co.gdyn, self.fx_co.ga, self.fx_co.gdyn) self.assert_join(self.fx_contra.gdyn, self.fx_contra.ga, self.fx_contra.gdyn) def test_callables_with_any(self) -> None: self.assert_join( self.callable(self.fx.a, self.fx.a, self.fx.anyt, self.fx.a), self.callable(self.fx.a, self.fx.anyt, self.fx.a, self.fx.anyt), self.callable(self.fx.a, self.fx.anyt, self.fx.anyt, self.fx.anyt), ) def 
test_overloaded(self) -> None: c = self.callable def ov(*items: CallableType) -> Overloaded: return Overloaded(list(items)) fx = self.fx func = fx.function c1 = c(fx.a, fx.a) c2 = c(fx.b, fx.b) c3 = c(fx.c, fx.c) self.assert_join(ov(c1, c2), c1, c1) self.assert_join(ov(c1, c2), c2, c2) self.assert_join(ov(c1, c2), ov(c1, c2), ov(c1, c2)) self.assert_join(ov(c1, c2), ov(c1, c3), c1) self.assert_join(ov(c2, c1), ov(c3, c1), c1) self.assert_join(ov(c1, c2), c3, func) def test_overloaded_with_any(self) -> None: c = self.callable def ov(*items: CallableType) -> Overloaded: return Overloaded(list(items)) fx = self.fx any = fx.anyt self.assert_join(ov(c(fx.a, fx.a), c(fx.b, fx.b)), c(any, fx.b), c(any, fx.b)) self.assert_join(ov(c(fx.a, fx.a), c(any, fx.b)), c(fx.b, fx.b), c(any, fx.b)) def test_join_interface_types(self) -> None: self.assert_join(self.fx.f, self.fx.f, self.fx.f) self.assert_join(self.fx.f, self.fx.f2, self.fx.o) self.assert_join(self.fx.f, self.fx.f3, self.fx.f) def test_join_interface_and_class_types(self) -> None: self.assert_join(self.fx.o, self.fx.f, self.fx.o) self.assert_join(self.fx.a, self.fx.f, self.fx.o) self.assert_join(self.fx.e, self.fx.f, self.fx.f) @skip def test_join_class_types_with_interface_result(self) -> None: # Unique result self.assert_join(self.fx.e, self.fx.e2, self.fx.f) # Ambiguous result self.assert_join(self.fx.e2, self.fx.e3, self.fx.anyt) @skip def test_generic_interfaces(self) -> None: fx = InterfaceTypeFixture() self.assert_join(fx.gfa, fx.gfa, fx.gfa) self.assert_join(fx.gfa, fx.gfb, fx.o) self.assert_join(fx.m1, fx.gfa, fx.gfa) self.assert_join(fx.m1, fx.gfb, fx.o) def test_simple_type_objects(self) -> None: t1 = self.type_callable(self.fx.a, self.fx.a) t2 = self.type_callable(self.fx.b, self.fx.b) tr = self.type_callable(self.fx.b, self.fx.a) self.assert_join(t1, t1, t1) j = join_types(t1, t1) assert isinstance(j, CallableType) assert j.is_type_obj() self.assert_join(t1, t2, tr) self.assert_join(t1, self.fx.type_type, self.fx.type_type) self.assert_join(self.fx.type_type, self.fx.type_type, self.fx.type_type) def test_type_type(self) -> None: self.assert_join(self.fx.type_a, self.fx.type_b, self.fx.type_a) self.assert_join(self.fx.type_b, self.fx.type_any, self.fx.type_any) self.assert_join(self.fx.type_b, self.fx.type_type, self.fx.type_type) self.assert_join(self.fx.type_b, self.fx.type_c, self.fx.type_a) self.assert_join(self.fx.type_c, self.fx.type_d, TypeType.make_normalized(self.fx.o)) self.assert_join(self.fx.type_type, self.fx.type_any, self.fx.type_type) self.assert_join(self.fx.type_b, self.fx.anyt, self.fx.anyt) def test_literal_type(self) -> None: a = self.fx.a d = self.fx.d lit1 = self.fx.lit1 lit2 = self.fx.lit2 lit3 = self.fx.lit3 self.assert_join(lit1, lit1, lit1) self.assert_join(lit1, a, a) self.assert_join(lit1, d, self.fx.o) self.assert_join(lit1, lit2, a) self.assert_join(lit1, lit3, self.fx.o) self.assert_join(lit1, self.fx.anyt, self.fx.anyt) self.assert_join(UnionType([lit1, lit2]), lit2, UnionType([lit1, lit2])) self.assert_join(UnionType([lit1, lit2]), a, a) self.assert_join(UnionType([lit1, lit3]), a, UnionType([a, lit3])) self.assert_join(UnionType([d, lit3]), lit3, d) self.assert_join(UnionType([d, lit3]), d, UnionType([d, lit3])) self.assert_join(UnionType([a, lit1]), lit1, a) self.assert_join(UnionType([a, lit1]), lit2, a) self.assert_join(UnionType([lit1, lit2]), UnionType([lit1, lit2]), UnionType([lit1, lit2])) # The order in which we try joining two unions influences the # ordering of the items in the final 
produced unions. So, we # manually call 'assert_simple_join' and tune the output # after swapping the arguments here. self.assert_simple_join( UnionType([lit1, lit2]), UnionType([lit2, lit3]), UnionType([lit1, lit2, lit3]) ) self.assert_simple_join( UnionType([lit2, lit3]), UnionType([lit1, lit2]), UnionType([lit2, lit3, lit1]) ) def test_variadic_tuple_joins(self) -> None: # These tests really test just the "arity", to be sure it is handled correctly. self.assert_join( self.tuple(self.fx.a, self.fx.a), self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))), self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))), ) self.assert_join( self.tuple(self.fx.a, self.fx.a), self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a])), self.fx.a), self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a])), self.fx.a), ) self.assert_join( self.tuple(self.fx.a, self.fx.a), self.tuple(self.fx.a, UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))), self.tuple(self.fx.a, UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))), ) self.assert_join( self.tuple( self.fx.a, UnpackType(Instance(self.fx.std_tuplei, [self.fx.a])), self.fx.a ), self.tuple( self.fx.a, UnpackType(Instance(self.fx.std_tuplei, [self.fx.a])), self.fx.a ), self.tuple( self.fx.a, UnpackType(Instance(self.fx.std_tuplei, [self.fx.a])), self.fx.a ), ) self.assert_join( self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))), self.tuple( self.fx.a, UnpackType(Instance(self.fx.std_tuplei, [self.fx.a])), self.fx.a ), self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))), ) self.assert_join( self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))), self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))), self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))), ) self.assert_join( self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a])), self.fx.a), self.tuple( self.fx.b, UnpackType(Instance(self.fx.std_tuplei, [self.fx.b])), self.fx.b ), self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a])), self.fx.a), ) # There are additional test cases in check-inference.test. # TODO: Function types + varargs and default args. def assert_join(self, s: Type, t: Type, join: Type) -> None: self.assert_simple_join(s, t, join) self.assert_simple_join(t, s, join) def assert_simple_join(self, s: Type, t: Type, join: Type) -> None: result = join_types(s, t) actual = str(result) expected = str(join) assert_equal(actual, expected, f"join({s}, {t}) == {{}} ({{}} expected)") assert is_subtype(s, result), f"{s} not subtype of {result}" assert is_subtype(t, result), f"{t} not subtype of {result}" def tuple(self, *a: Type) -> TupleType: return TupleType(list(a), self.fx.std_tuple) def var_tuple(self, t: Type) -> Instance: """Construct a variable-length tuple type""" return Instance(self.fx.std_tuplei, [t]) def callable(self, *a: Type) -> CallableType: """callable(a1, ..., an, r) constructs a callable with argument types a1, ... an and return type r. """ n = len(a) - 1 return CallableType(list(a[:-1]), [ARG_POS] * n, [None] * n, a[-1], self.fx.function) def type_callable(self, *a: Type) -> CallableType: """type_callable(a1, ..., an, r) constructs a callable with argument types a1, ... an and return type r, and which represents a type. 
""" n = len(a) - 1 return CallableType(list(a[:-1]), [ARG_POS] * n, [None] * n, a[-1], self.fx.type_type) class MeetSuite(Suite): def setUp(self) -> None: self.fx = TypeFixture() def test_trivial_cases(self) -> None: for simple in self.fx.a, self.fx.o, self.fx.b: self.assert_meet(simple, simple, simple) def test_class_subtyping(self) -> None: self.assert_meet(self.fx.a, self.fx.o, self.fx.a) self.assert_meet(self.fx.a, self.fx.b, self.fx.b) self.assert_meet(self.fx.b, self.fx.o, self.fx.b) self.assert_meet(self.fx.a, self.fx.d, NoneType()) self.assert_meet(self.fx.b, self.fx.c, NoneType()) def test_tuples(self) -> None: self.assert_meet(self.tuple(), self.tuple(), self.tuple()) self.assert_meet(self.tuple(self.fx.a), self.tuple(self.fx.a), self.tuple(self.fx.a)) self.assert_meet( self.tuple(self.fx.b, self.fx.c), self.tuple(self.fx.a, self.fx.d), self.tuple(self.fx.b, NoneType()), ) self.assert_meet( self.tuple(self.fx.a, self.fx.a), self.fx.std_tuple, self.tuple(self.fx.a, self.fx.a) ) self.assert_meet(self.tuple(self.fx.a), self.tuple(self.fx.a, self.fx.a), NoneType()) def test_function_types(self) -> None: self.assert_meet( self.callable(self.fx.a, self.fx.b), self.callable(self.fx.a, self.fx.b), self.callable(self.fx.a, self.fx.b), ) self.assert_meet( self.callable(self.fx.a, self.fx.b), self.callable(self.fx.b, self.fx.b), self.callable(self.fx.a, self.fx.b), ) self.assert_meet( self.callable(self.fx.a, self.fx.b), self.callable(self.fx.a, self.fx.a), self.callable(self.fx.a, self.fx.b), ) def test_type_vars(self) -> None: self.assert_meet(self.fx.t, self.fx.t, self.fx.t) self.assert_meet(self.fx.s, self.fx.s, self.fx.s) self.assert_meet(self.fx.t, self.fx.s, NoneType()) def test_none(self) -> None: self.assert_meet(NoneType(), NoneType(), NoneType()) self.assert_meet(NoneType(), self.fx.anyt, NoneType()) # Any type t joined with None results in None, unless t is Any. for t in [ self.fx.a, self.fx.o, UnboundType("x"), self.fx.t, self.tuple(), self.callable(self.fx.a, self.fx.b), ]: self.assert_meet(t, NoneType(), NoneType()) def test_unbound_type(self) -> None: self.assert_meet(UnboundType("x"), UnboundType("x"), self.fx.anyt) self.assert_meet(UnboundType("x"), UnboundType("y"), self.fx.anyt) self.assert_meet(UnboundType("x"), self.fx.anyt, UnboundType("x")) # The meet of any type t with an unbound type results in dynamic. # Unbound type means that there is an error somewhere in the program, # so this does not affect type safety. for t in [ self.fx.a, self.fx.o, self.fx.t, self.tuple(), self.callable(self.fx.a, self.fx.b), ]: self.assert_meet(t, UnboundType("X"), self.fx.anyt) def test_dynamic_type(self) -> None: # Meet against dynamic type always results in dynamic. 
for t in [ self.fx.anyt, self.fx.a, self.fx.o, NoneType(), UnboundType("x"), self.fx.t, self.tuple(), self.callable(self.fx.a, self.fx.b), ]: self.assert_meet(t, self.fx.anyt, t) def test_simple_generics(self) -> None: self.assert_meet(self.fx.ga, self.fx.ga, self.fx.ga) self.assert_meet(self.fx.ga, self.fx.o, self.fx.ga) self.assert_meet(self.fx.ga, self.fx.gb, self.fx.gb) self.assert_meet(self.fx.ga, self.fx.gd, self.fx.nonet) self.assert_meet(self.fx.ga, self.fx.g2a, self.fx.nonet) self.assert_meet(self.fx.ga, self.fx.nonet, self.fx.nonet) self.assert_meet(self.fx.ga, self.fx.anyt, self.fx.ga) for t in [self.fx.a, self.fx.t, self.tuple(), self.callable(self.fx.a, self.fx.b)]: self.assert_meet(t, self.fx.ga, self.fx.nonet) def test_generics_with_multiple_args(self) -> None: self.assert_meet(self.fx.hab, self.fx.hab, self.fx.hab) self.assert_meet(self.fx.hab, self.fx.haa, self.fx.hab) self.assert_meet(self.fx.hab, self.fx.had, self.fx.nonet) self.assert_meet(self.fx.hab, self.fx.hbb, self.fx.hbb) def test_generics_with_inheritance(self) -> None: self.assert_meet(self.fx.gsab, self.fx.gb, self.fx.gsab) self.assert_meet(self.fx.gsba, self.fx.gb, self.fx.nonet) def test_generics_with_inheritance_and_shared_supertype(self) -> None: self.assert_meet(self.fx.gsba, self.fx.gs2a, self.fx.nonet) self.assert_meet(self.fx.gsab, self.fx.gs2a, self.fx.nonet) def test_generic_types_and_dynamic(self) -> None: self.assert_meet(self.fx.gdyn, self.fx.ga, self.fx.ga) def test_callables_with_dynamic(self) -> None: self.assert_meet( self.callable(self.fx.a, self.fx.a, self.fx.anyt, self.fx.a), self.callable(self.fx.a, self.fx.anyt, self.fx.a, self.fx.anyt), self.callable(self.fx.a, self.fx.anyt, self.fx.anyt, self.fx.anyt), ) def test_meet_interface_types(self) -> None: self.assert_meet(self.fx.f, self.fx.f, self.fx.f) self.assert_meet(self.fx.f, self.fx.f2, self.fx.nonet) self.assert_meet(self.fx.f, self.fx.f3, self.fx.f3) def test_meet_interface_and_class_types(self) -> None: self.assert_meet(self.fx.o, self.fx.f, self.fx.f) self.assert_meet(self.fx.a, self.fx.f, self.fx.nonet) self.assert_meet(self.fx.e, self.fx.f, self.fx.e) def test_meet_class_types_with_shared_interfaces(self) -> None: # These have nothing special with respect to meets, unlike joins. These # are for completeness only. 
self.assert_meet(self.fx.e, self.fx.e2, self.fx.nonet) self.assert_meet(self.fx.e2, self.fx.e3, self.fx.nonet) def test_meet_with_generic_interfaces(self) -> None: fx = InterfaceTypeFixture() self.assert_meet(fx.gfa, fx.m1, fx.m1) self.assert_meet(fx.gfa, fx.gfa, fx.gfa) self.assert_meet(fx.gfb, fx.m1, fx.nonet) def test_type_type(self) -> None: self.assert_meet(self.fx.type_a, self.fx.type_b, self.fx.type_b) self.assert_meet(self.fx.type_b, self.fx.type_any, self.fx.type_b) self.assert_meet(self.fx.type_b, self.fx.type_type, self.fx.type_b) self.assert_meet(self.fx.type_b, self.fx.type_c, self.fx.nonet) self.assert_meet(self.fx.type_c, self.fx.type_d, self.fx.nonet) self.assert_meet(self.fx.type_type, self.fx.type_any, self.fx.type_any) self.assert_meet(self.fx.type_b, self.fx.anyt, self.fx.type_b) def test_literal_type(self) -> None: a = self.fx.a lit1 = self.fx.lit1 lit2 = self.fx.lit2 lit3 = self.fx.lit3 self.assert_meet(lit1, lit1, lit1) self.assert_meet(lit1, a, lit1) self.assert_meet_uninhabited(lit1, lit3) self.assert_meet_uninhabited(lit1, lit2) self.assert_meet(UnionType([lit1, lit2]), lit1, lit1) self.assert_meet(UnionType([lit1, lit2]), UnionType([lit2, lit3]), lit2) self.assert_meet(UnionType([lit1, lit2]), UnionType([lit1, lit2]), UnionType([lit1, lit2])) self.assert_meet(lit1, self.fx.anyt, lit1) self.assert_meet(lit1, self.fx.o, lit1) assert is_same_type(lit1, narrow_declared_type(lit1, a)) assert is_same_type(lit2, narrow_declared_type(lit2, a)) # FIX generic interfaces + ranges def assert_meet_uninhabited(self, s: Type, t: Type) -> None: with state.strict_optional_set(False): self.assert_meet(s, t, self.fx.nonet) with state.strict_optional_set(True): self.assert_meet(s, t, self.fx.uninhabited) def test_variadic_tuple_meets(self) -> None: # These tests really test just the "arity", to be sure it is handled correctly. self.assert_meet( self.tuple(self.fx.a, self.fx.a), self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))), self.tuple(self.fx.a, self.fx.a), ) self.assert_meet( self.tuple(self.fx.a, self.fx.a), self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a])), self.fx.a), self.tuple(self.fx.a, self.fx.a), ) self.assert_meet( self.tuple(self.fx.a, self.fx.a), self.tuple(self.fx.a, UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))), self.tuple(self.fx.a, self.fx.a), ) self.assert_meet( self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))), self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))), self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a]))), ) self.assert_meet( self.tuple(UnpackType(Instance(self.fx.std_tuplei, [self.fx.a])), self.fx.a), self.tuple(self.fx.b, UnpackType(Instance(self.fx.std_tuplei, [self.fx.b]))), self.tuple(self.fx.b, UnpackType(Instance(self.fx.std_tuplei, [self.fx.b]))), ) def assert_meet(self, s: Type, t: Type, meet: Type) -> None: self.assert_simple_meet(s, t, meet) self.assert_simple_meet(t, s, meet) def assert_simple_meet(self, s: Type, t: Type, meet: Type) -> None: result = meet_types(s, t) actual = str(result) expected = str(meet) assert_equal(actual, expected, f"meet({s}, {t}) == {{}} ({{}} expected)") assert is_subtype(result, s), f"{result} not subtype of {s}" assert is_subtype(result, t), f"{result} not subtype of {t}" def tuple(self, *a: Type) -> TupleType: return TupleType(list(a), self.fx.std_tuple) def callable(self, *a: Type) -> CallableType: """callable(a1, ..., an, r) constructs a callable with argument types a1, ... an and return type r. 
""" n = len(a) - 1 return CallableType(list(a[:-1]), [ARG_POS] * n, [None] * n, a[-1], self.fx.function) class SameTypeSuite(Suite): def setUp(self) -> None: self.fx = TypeFixture() def test_literal_type(self) -> None: a = self.fx.a b = self.fx.b # Reminder: b is a subclass of a lit1 = self.fx.lit1 lit2 = self.fx.lit2 lit3 = self.fx.lit3 self.assert_same(lit1, lit1) self.assert_same(UnionType([lit1, lit2]), UnionType([lit1, lit2])) self.assert_same(UnionType([lit1, lit2]), UnionType([lit2, lit1])) self.assert_same(UnionType([a, b]), UnionType([b, a])) self.assert_not_same(lit1, b) self.assert_not_same(lit1, lit2) self.assert_not_same(lit1, lit3) self.assert_not_same(lit1, self.fx.anyt) self.assert_not_same(lit1, self.fx.nonet) def assert_same(self, s: Type, t: Type, strict: bool = True) -> None: self.assert_simple_is_same(s, t, expected=True, strict=strict) self.assert_simple_is_same(t, s, expected=True, strict=strict) def assert_not_same(self, s: Type, t: Type, strict: bool = True) -> None: self.assert_simple_is_same(s, t, False, strict=strict) self.assert_simple_is_same(t, s, False, strict=strict) def assert_simple_is_same(self, s: Type, t: Type, expected: bool, strict: bool) -> None: actual = is_same_type(s, t) assert_equal(actual, expected, f"is_same_type({s}, {t}) is {{}} ({{}} expected)") if strict: actual2 = s == t assert_equal(actual2, expected, f"({s} == {t}) is {{}} ({{}} expected)") assert_equal( hash(s) == hash(t), expected, f"(hash({s}) == hash({t}) is {{}} ({{}} expected)" ) class RemoveLastKnownValueSuite(Suite): def setUp(self) -> None: self.fx = TypeFixture() def test_optional(self) -> None: t = UnionType.make_union([self.fx.a, self.fx.nonet]) self.assert_union_result(t, [self.fx.a, self.fx.nonet]) def test_two_instances(self) -> None: t = UnionType.make_union([self.fx.a, self.fx.b]) self.assert_union_result(t, [self.fx.a, self.fx.b]) def test_multiple_same_instances(self) -> None: t = UnionType.make_union([self.fx.a, self.fx.a]) assert remove_instance_last_known_values(t) == self.fx.a t = UnionType.make_union([self.fx.a, self.fx.a, self.fx.b]) self.assert_union_result(t, [self.fx.a, self.fx.b]) t = UnionType.make_union([self.fx.a, self.fx.nonet, self.fx.a, self.fx.b]) self.assert_union_result(t, [self.fx.a, self.fx.nonet, self.fx.b]) def test_single_last_known_value(self) -> None: t = UnionType.make_union([self.fx.lit1_inst, self.fx.nonet]) self.assert_union_result(t, [self.fx.a, self.fx.nonet]) def test_last_known_values_with_merge(self) -> None: t = UnionType.make_union([self.fx.lit1_inst, self.fx.lit2_inst, self.fx.lit4_inst]) assert remove_instance_last_known_values(t) == self.fx.a t = UnionType.make_union( [self.fx.lit1_inst, self.fx.b, self.fx.lit2_inst, self.fx.lit4_inst] ) self.assert_union_result(t, [self.fx.a, self.fx.b]) def test_generics(self) -> None: t = UnionType.make_union([self.fx.ga, self.fx.gb]) self.assert_union_result(t, [self.fx.ga, self.fx.gb]) def assert_union_result(self, t: ProperType, expected: list[Type]) -> None: t2 = remove_instance_last_known_values(t) assert type(t2) is UnionType assert t2.items == expected class ShallowOverloadMatchingSuite(Suite): def setUp(self) -> None: self.fx = TypeFixture() def test_simple(self) -> None: fx = self.fx ov = self.make_overload([[("x", fx.anyt, ARG_NAMED)], [("y", fx.anyt, ARG_NAMED)]]) # Match first only self.assert_find_shallow_matching_overload_item(ov, make_call(("foo", "x")), 0) # Match second only self.assert_find_shallow_matching_overload_item(ov, make_call(("foo", "y")), 1) # No match -- invalid 
keyword arg name self.assert_find_shallow_matching_overload_item(ov, make_call(("foo", "z")), 1) # No match -- missing arg self.assert_find_shallow_matching_overload_item(ov, make_call(), 1) # No match -- extra arg self.assert_find_shallow_matching_overload_item( ov, make_call(("foo", "x"), ("foo", "z")), 1 ) def test_match_using_types(self) -> None: fx = self.fx ov = self.make_overload( [ [("x", fx.nonet, ARG_POS)], [("x", fx.lit_false, ARG_POS)], [("x", fx.lit_true, ARG_POS)], [("x", fx.anyt, ARG_POS)], ] ) self.assert_find_shallow_matching_overload_item(ov, make_call(("None", None)), 0) self.assert_find_shallow_matching_overload_item(ov, make_call(("builtins.False", None)), 1) self.assert_find_shallow_matching_overload_item(ov, make_call(("builtins.True", None)), 2) self.assert_find_shallow_matching_overload_item(ov, make_call(("foo", None)), 3) def test_none_special_cases(self) -> None: fx = self.fx ov = self.make_overload( [[("x", fx.callable(fx.nonet), ARG_POS)], [("x", fx.nonet, ARG_POS)]] ) self.assert_find_shallow_matching_overload_item(ov, make_call(("None", None)), 1) self.assert_find_shallow_matching_overload_item(ov, make_call(("func", None)), 0) ov = self.make_overload([[("x", fx.str_type, ARG_POS)], [("x", fx.nonet, ARG_POS)]]) self.assert_find_shallow_matching_overload_item(ov, make_call(("None", None)), 1) self.assert_find_shallow_matching_overload_item(ov, make_call(("func", None)), 0) ov = self.make_overload( [[("x", UnionType([fx.str_type, fx.a]), ARG_POS)], [("x", fx.nonet, ARG_POS)]] ) self.assert_find_shallow_matching_overload_item(ov, make_call(("None", None)), 1) self.assert_find_shallow_matching_overload_item(ov, make_call(("func", None)), 0) ov = self.make_overload([[("x", fx.o, ARG_POS)], [("x", fx.nonet, ARG_POS)]]) self.assert_find_shallow_matching_overload_item(ov, make_call(("None", None)), 0) self.assert_find_shallow_matching_overload_item(ov, make_call(("func", None)), 0) ov = self.make_overload( [[("x", UnionType([fx.str_type, fx.nonet]), ARG_POS)], [("x", fx.nonet, ARG_POS)]] ) self.assert_find_shallow_matching_overload_item(ov, make_call(("None", None)), 0) self.assert_find_shallow_matching_overload_item(ov, make_call(("func", None)), 0) ov = self.make_overload([[("x", fx.anyt, ARG_POS)], [("x", fx.nonet, ARG_POS)]]) self.assert_find_shallow_matching_overload_item(ov, make_call(("None", None)), 0) self.assert_find_shallow_matching_overload_item(ov, make_call(("func", None)), 0) def test_optional_arg(self) -> None: fx = self.fx ov = self.make_overload( [[("x", fx.anyt, ARG_NAMED)], [("y", fx.anyt, ARG_OPT)], [("z", fx.anyt, ARG_NAMED)]] ) self.assert_find_shallow_matching_overload_item(ov, make_call(), 1) self.assert_find_shallow_matching_overload_item(ov, make_call(("foo", "x")), 0) self.assert_find_shallow_matching_overload_item(ov, make_call(("foo", "y")), 1) self.assert_find_shallow_matching_overload_item(ov, make_call(("foo", "z")), 2) def test_two_args(self) -> None: fx = self.fx ov = self.make_overload( [ [("x", fx.nonet, ARG_OPT), ("y", fx.anyt, ARG_OPT)], [("x", fx.anyt, ARG_OPT), ("y", fx.anyt, ARG_OPT)], ] ) self.assert_find_shallow_matching_overload_item(ov, make_call(), 0) self.assert_find_shallow_matching_overload_item(ov, make_call(("None", "x")), 0) self.assert_find_shallow_matching_overload_item(ov, make_call(("foo", "x")), 1) self.assert_find_shallow_matching_overload_item( ov, make_call(("foo", "y"), ("None", "x")), 0 ) self.assert_find_shallow_matching_overload_item( ov, make_call(("foo", "y"), ("bar", "x")), 1 ) def 
assert_find_shallow_matching_overload_item( self, ov: Overloaded, call: CallExpr, expected_index: int ) -> None: c = find_shallow_matching_overload_item(ov, call) assert c in ov.items assert ov.items.index(c) == expected_index def make_overload(self, items: list[list[tuple[str, Type, ArgKind]]]) -> Overloaded: result = [] for item in items: arg_types = [] arg_names = [] arg_kinds = [] for name, typ, kind in item: arg_names.append(name) arg_types.append(typ) arg_kinds.append(kind) result.append( CallableType( arg_types, arg_kinds, arg_names, ret_type=NoneType(), fallback=self.fx.o ) ) return Overloaded(result) def make_call(*items: tuple[str, str | None]) -> CallExpr: args: list[Expression] = [] arg_names = [] arg_kinds = [] for arg, name in items: shortname = arg.split(".")[-1] n = NameExpr(shortname) n.fullname = arg args.append(n) arg_names.append(name) if name: arg_kinds.append(ARG_NAMED) else: arg_kinds.append(ARG_POS) return CallExpr(NameExpr("f"), args, arg_kinds, arg_names) class TestExpandTypeLimitGetProperType(TestCase): # WARNING: do not increase this number unless absolutely necessary, # and you understand what you are doing. ALLOWED_GET_PROPER_TYPES = 9 @skipUnless(mypy.expandtype.__file__.endswith(".py"), "Skip for compiled mypy") def test_count_get_proper_type(self) -> None: with open(mypy.expandtype.__file__) as f: code = f.read() get_proper_type_count = len(re.findall("get_proper_type", code)) assert get_proper_type_count == self.ALLOWED_GET_PROPER_TYPES
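# The join/meet suites above exercise mypy's type lattice directly. A minimal
# standalone sketch of the invariants they assert (assumes a mypy checkout;
# TypeFixture defines B and C as subclasses of A, and the suites compare
# results by their string form):
#
#     from mypy.join import join_types
#     from mypy.meet import meet_types
#     from mypy.test.typefixture import TypeFixture
#
#     fx = TypeFixture()
#     assert str(join_types(fx.b, fx.c)) == "A"  # least upper bound
#     assert str(meet_types(fx.a, fx.b)) == "B"  # greatest lower bound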
algorandfoundation/puya
src/puyapy/_vendor/mypy/test/testtypes.py
Python
NOASSERTION
62,301
from __future__ import annotations

import os
from unittest import TestCase, mock

from mypy.inspections import parse_location
from mypy.util import _generate_junit_contents, get_terminal_width


class TestGetTerminalSize(TestCase):
    def test_get_terminal_size_in_pty_defaults_to_80(self) -> None:
        # when run using a pty, `os.get_terminal_size()` returns `0, 0`
        ret = os.terminal_size((0, 0))
        mock_environ = os.environ.copy()
        mock_environ.pop("COLUMNS", None)
        with mock.patch.object(os, "get_terminal_size", return_value=ret):
            with mock.patch.dict(os.environ, values=mock_environ, clear=True):
                assert get_terminal_width() == 80

    def test_parse_location_windows(self) -> None:
        assert parse_location(r"C:\test.py:1:1") == (r"C:\test.py", [1, 1])
        assert parse_location(r"C:\test.py:1:1:1:1") == (r"C:\test.py", [1, 1, 1, 1])


class TestWriteJunitXml(TestCase):
    def test_junit_pass(self) -> None:
        serious = False
        messages_by_file: dict[str | None, list[str]] = {}
        expected = """<?xml version="1.0" encoding="utf-8"?>
<testsuite errors="0" failures="0" name="mypy" skips="0" tests="1" time="1.230">
  <testcase classname="mypy" file="mypy" line="1" name="mypy-py3.14-test-plat" time="1.230">
  </testcase>
</testsuite>
"""
        result = _generate_junit_contents(
            dt=1.23,
            serious=serious,
            messages_by_file=messages_by_file,
            version="3.14",
            platform="test-plat",
        )
        assert result == expected

    def test_junit_fail_escape_xml_chars(self) -> None:
        serious = False
        messages_by_file: dict[str | None, list[str]] = {
            "file1.py": ["Test failed", "another line < > &"]
        }
        expected = """<?xml version="1.0" encoding="utf-8"?>
<testsuite errors="0" failures="1" name="mypy" skips="0" tests="1" time="1.230">
  <testcase classname="mypy" file="file1.py" line="1" name="mypy-py3.14-test-plat file1.py" time="1.230">
    <failure message="mypy produced messages">Test failed
another line &lt; &gt; &amp;</failure>
  </testcase>
</testsuite>
"""
        result = _generate_junit_contents(
            dt=1.23,
            serious=serious,
            messages_by_file=messages_by_file,
            version="3.14",
            platform="test-plat",
        )
        assert result == expected

    def test_junit_fail_two_files(self) -> None:
        serious = False
        messages_by_file: dict[str | None, list[str]] = {
            "file1.py": ["Test failed", "another line"],
            "file2.py": ["Another failure", "line 2"],
        }
        expected = """<?xml version="1.0" encoding="utf-8"?>
<testsuite errors="0" failures="2" name="mypy" skips="0" tests="2" time="1.230">
  <testcase classname="mypy" file="file1.py" line="1" name="mypy-py3.14-test-plat file1.py" time="1.230">
    <failure message="mypy produced messages">Test failed
another line</failure>
  </testcase>
  <testcase classname="mypy" file="file2.py" line="1" name="mypy-py3.14-test-plat file2.py" time="1.230">
    <failure message="mypy produced messages">Another failure
line 2</failure>
  </testcase>
</testsuite>
"""
        result = _generate_junit_contents(
            dt=1.23,
            serious=serious,
            messages_by_file=messages_by_file,
            version="3.14",
            platform="test-plat",
        )
        assert result == expected

    def test_serious_error(self) -> None:
        serious = True
        messages_by_file: dict[str | None, list[str]] = {None: ["Error line 1", "Error line 2"]}
        expected = """<?xml version="1.0" encoding="utf-8"?>
<testsuite errors="1" failures="0" name="mypy" skips="0" tests="1" time="1.230">
  <testcase classname="mypy" file="mypy" line="1" name="mypy-py3.14-test-plat" time="1.230">
    <failure message="mypy produced messages">Error line 1
Error line 2</failure>
  </testcase>
</testsuite>
"""
        result = _generate_junit_contents(
            dt=1.23,
            serious=serious,
            messages_by_file=messages_by_file,
            version="3.14",
            platform="test-plat",
        )
        assert result == expected
algorandfoundation/puya
src/puyapy/_vendor/mypy/test/testutil.py
Python
NOASSERTION
4,233
"""Fixture used in type-related test cases. It contains class TypeInfos and Type objects. """ from __future__ import annotations from mypy.nodes import ( ARG_OPT, ARG_POS, ARG_STAR, COVARIANT, MDEF, Block, ClassDef, FuncDef, SymbolTable, SymbolTableNode, TypeAlias, TypeInfo, ) from mypy.semanal_shared import set_callable_name from mypy.types import ( AnyType, CallableType, Instance, LiteralType, NoneType, Type, TypeAliasType, TypeOfAny, TypeType, TypeVarId, TypeVarLikeType, TypeVarTupleType, TypeVarType, UninhabitedType, UnionType, ) class TypeFixture: """Helper class that is used as a fixture in type-related unit tests. The members are initialized to contain various type-related values. """ def __init__(self, variance: int = COVARIANT) -> None: # The 'object' class self.oi = self.make_type_info("builtins.object") # class object self.o = Instance(self.oi, []) # object # Type variables (these are effectively global) def make_type_var( name: str, id: int, values: list[Type], upper_bound: Type, variance: int ) -> TypeVarType: return TypeVarType( name, name, TypeVarId(id), values, upper_bound, AnyType(TypeOfAny.from_omitted_generics), variance, ) self.t = make_type_var("T", 1, [], self.o, variance) # T`1 (type variable) self.tf = make_type_var("T", -1, [], self.o, variance) # T`-1 (type variable) self.tf2 = make_type_var("T", -2, [], self.o, variance) # T`-2 (type variable) self.s = make_type_var("S", 2, [], self.o, variance) # S`2 (type variable) self.s1 = make_type_var("S", 1, [], self.o, variance) # S`1 (type variable) self.sf = make_type_var("S", -2, [], self.o, variance) # S`-2 (type variable) self.sf1 = make_type_var("S", -1, [], self.o, variance) # S`-1 (type variable) self.u = make_type_var("U", 3, [], self.o, variance) # U`3 (type variable) # Simple types self.anyt = AnyType(TypeOfAny.special_form) self.nonet = NoneType() self.uninhabited = UninhabitedType() # Abstract class TypeInfos # class F self.fi = self.make_type_info("F", is_abstract=True) # class F2 self.f2i = self.make_type_info("F2", is_abstract=True) # class F3(F) self.f3i = self.make_type_info("F3", is_abstract=True, mro=[self.fi]) # Class TypeInfos self.std_tuplei = self.make_type_info( "builtins.tuple", mro=[self.oi], typevars=["T"], variances=[COVARIANT] ) # class tuple self.type_typei = self.make_type_info("builtins.type") # class type self.bool_type_info = self.make_type_info("builtins.bool") self.str_type_info = self.make_type_info("builtins.str") self.functioni = self.make_type_info("builtins.function") # function TODO self.ai = self.make_type_info("A", mro=[self.oi]) # class A self.bi = self.make_type_info("B", mro=[self.ai, self.oi]) # class B(A) self.ci = self.make_type_info("C", mro=[self.ai, self.oi]) # class C(A) self.di = self.make_type_info("D", mro=[self.oi]) # class D # class E(F) self.ei = self.make_type_info("E", mro=[self.fi, self.oi]) # class E2(F2, F) self.e2i = self.make_type_info("E2", mro=[self.f2i, self.fi, self.oi]) # class E3(F, F2) self.e3i = self.make_type_info("E3", mro=[self.fi, self.f2i, self.oi]) # Generic class TypeInfos # G[T] self.gi = self.make_type_info("G", mro=[self.oi], typevars=["T"], variances=[variance]) # G2[T] self.g2i = self.make_type_info("G2", mro=[self.oi], typevars=["T"], variances=[variance]) # H[S, T] self.hi = self.make_type_info( "H", mro=[self.oi], typevars=["S", "T"], variances=[variance, variance] ) # GS[T, S] <: G[S] self.gsi = self.make_type_info( "GS", mro=[self.gi, self.oi], typevars=["T", "S"], variances=[variance, variance], bases=[Instance(self.gi, 
[self.s])], ) # GS2[S] <: G[S] self.gs2i = self.make_type_info( "GS2", mro=[self.gi, self.oi], typevars=["S"], variances=[variance], bases=[Instance(self.gi, [self.s1])], ) # list[T] self.std_listi = self.make_type_info( "builtins.list", mro=[self.oi], typevars=["T"], variances=[variance] ) # Instance types self.std_tuple = Instance(self.std_tuplei, [self.anyt]) # tuple self.type_type = Instance(self.type_typei, []) # type self.function = Instance(self.functioni, []) # function TODO self.str_type = Instance(self.str_type_info, []) self.bool_type = Instance(self.bool_type_info, []) self.a = Instance(self.ai, []) # A self.b = Instance(self.bi, []) # B self.c = Instance(self.ci, []) # C self.d = Instance(self.di, []) # D self.e = Instance(self.ei, []) # E self.e2 = Instance(self.e2i, []) # E2 self.e3 = Instance(self.e3i, []) # E3 self.f = Instance(self.fi, []) # F self.f2 = Instance(self.f2i, []) # F2 self.f3 = Instance(self.f3i, []) # F3 # Generic instance types self.ga = Instance(self.gi, [self.a]) # G[A] self.gb = Instance(self.gi, [self.b]) # G[B] self.gd = Instance(self.gi, [self.d]) # G[D] self.go = Instance(self.gi, [self.o]) # G[object] self.gt = Instance(self.gi, [self.t]) # G[T`1] self.gtf = Instance(self.gi, [self.tf]) # G[T`-1] self.gtf2 = Instance(self.gi, [self.tf2]) # G[T`-2] self.gs = Instance(self.gi, [self.s]) # G[S] self.gdyn = Instance(self.gi, [self.anyt]) # G[Any] self.gn = Instance(self.gi, [NoneType()]) # G[None] self.g2a = Instance(self.g2i, [self.a]) # G2[A] self.gsaa = Instance(self.gsi, [self.a, self.a]) # GS[A, A] self.gsab = Instance(self.gsi, [self.a, self.b]) # GS[A, B] self.gsba = Instance(self.gsi, [self.b, self.a]) # GS[B, A] self.gs2a = Instance(self.gs2i, [self.a]) # GS2[A] self.gs2b = Instance(self.gs2i, [self.b]) # GS2[B] self.gs2d = Instance(self.gs2i, [self.d]) # GS2[D] self.hab = Instance(self.hi, [self.a, self.b]) # H[A, B] self.haa = Instance(self.hi, [self.a, self.a]) # H[A, A] self.hbb = Instance(self.hi, [self.b, self.b]) # H[B, B] self.hts = Instance(self.hi, [self.t, self.s]) # H[T, S] self.had = Instance(self.hi, [self.a, self.d]) # H[A, D] self.hao = Instance(self.hi, [self.a, self.o]) # H[A, object] self.lsta = Instance(self.std_listi, [self.a]) # List[A] self.lstb = Instance(self.std_listi, [self.b]) # List[B] self.lit1 = LiteralType(1, self.a) self.lit2 = LiteralType(2, self.a) self.lit3 = LiteralType("foo", self.d) self.lit4 = LiteralType(4, self.a) self.lit1_inst = Instance(self.ai, [], last_known_value=self.lit1) self.lit2_inst = Instance(self.ai, [], last_known_value=self.lit2) self.lit3_inst = Instance(self.di, [], last_known_value=self.lit3) self.lit4_inst = Instance(self.ai, [], last_known_value=self.lit4) self.lit_str1 = LiteralType("x", self.str_type) self.lit_str2 = LiteralType("y", self.str_type) self.lit_str3 = LiteralType("z", self.str_type) self.lit_str1_inst = Instance(self.str_type_info, [], last_known_value=self.lit_str1) self.lit_str2_inst = Instance(self.str_type_info, [], last_known_value=self.lit_str2) self.lit_str3_inst = Instance(self.str_type_info, [], last_known_value=self.lit_str3) self.lit_false = LiteralType(False, self.bool_type) self.lit_true = LiteralType(True, self.bool_type) self.type_a = TypeType.make_normalized(self.a) self.type_b = TypeType.make_normalized(self.b) self.type_c = TypeType.make_normalized(self.c) self.type_d = TypeType.make_normalized(self.d) self.type_t = TypeType.make_normalized(self.t) self.type_any = TypeType.make_normalized(self.anyt) self._add_bool_dunder(self.bool_type_info) 
self._add_bool_dunder(self.ai) # TypeVars with non-trivial bounds self.ub = make_type_var("UB", 5, [], self.b, variance) # UB`5 (type variable) self.uc = make_type_var("UC", 6, [], self.c, variance) # UC`6 (type variable) def make_type_var_tuple(name: str, id: int, upper_bound: Type) -> TypeVarTupleType: return TypeVarTupleType( name, name, TypeVarId(id), upper_bound, self.std_tuple, AnyType(TypeOfAny.from_omitted_generics), ) obj_tuple = self.std_tuple.copy_modified(args=[self.o]) self.ts = make_type_var_tuple("Ts", 1, obj_tuple) # Ts`1 (type var tuple) self.ss = make_type_var_tuple("Ss", 2, obj_tuple) # Ss`2 (type var tuple) self.us = make_type_var_tuple("Us", 3, obj_tuple) # Us`3 (type var tuple) self.gvi = self.make_type_info("GV", mro=[self.oi], typevars=["Ts"], typevar_tuple_index=0) self.gv2i = self.make_type_info( "GV2", mro=[self.oi], typevars=["T", "Ts", "S"], typevar_tuple_index=1 ) def _add_bool_dunder(self, type_info: TypeInfo) -> None: signature = CallableType([], [], [], Instance(self.bool_type_info, []), self.function) bool_func = FuncDef("__bool__", [], Block([])) bool_func.type = set_callable_name(signature, bool_func) type_info.names[bool_func.name] = SymbolTableNode(MDEF, bool_func) # Helper methods def callable(self, *a: Type) -> CallableType: """callable(a1, ..., an, r) constructs a callable with argument types a1, ... an and return type r. """ return CallableType( list(a[:-1]), [ARG_POS] * (len(a) - 1), [None] * (len(a) - 1), a[-1], self.function ) def callable_type(self, *a: Type) -> CallableType: """callable_type(a1, ..., an, r) constructs a callable with argument types a1, ... an and return type r, and which represents a type. """ return CallableType( list(a[:-1]), [ARG_POS] * (len(a) - 1), [None] * (len(a) - 1), a[-1], self.type_type ) def callable_default(self, min_args: int, *a: Type) -> CallableType: """callable_default(min_args, a1, ..., an, r) constructs a callable with argument types a1, ... an and return type r, with min_args mandatory fixed arguments. """ n = len(a) - 1 return CallableType( list(a[:-1]), [ARG_POS] * min_args + [ARG_OPT] * (n - min_args), [None] * n, a[-1], self.function, ) def callable_var_arg(self, min_args: int, *a: Type) -> CallableType: """callable_var_arg(min_args, a1, ..., an, r) constructs a callable with argument types a1, ... *an and return type r. """ n = len(a) - 1 return CallableType( list(a[:-1]), [ARG_POS] * min_args + [ARG_OPT] * (n - 1 - min_args) + [ARG_STAR], [None] * n, a[-1], self.function, ) def make_type_info( self, name: str, module_name: str | None = None, is_abstract: bool = False, mro: list[TypeInfo] | None = None, bases: list[Instance] | None = None, typevars: list[str] | None = None, typevar_tuple_index: int | None = None, variances: list[int] | None = None, ) -> TypeInfo: """Make a TypeInfo suitable for use in unit tests.""" class_def = ClassDef(name, Block([]), None, []) class_def.fullname = name if module_name is None: if "." 
in name: module_name = name.rsplit(".", 1)[0] else: module_name = "__main__" if typevars: v: list[TypeVarLikeType] = [] for id, n in enumerate(typevars, 1): if typevar_tuple_index is not None and id - 1 == typevar_tuple_index: v.append( TypeVarTupleType( n, n, TypeVarId(id), self.std_tuple.copy_modified(args=[self.o]), self.std_tuple.copy_modified(args=[self.o]), AnyType(TypeOfAny.from_omitted_generics), ) ) else: if variances: variance = variances[id - 1] else: variance = COVARIANT v.append( TypeVarType( n, n, TypeVarId(id), [], self.o, AnyType(TypeOfAny.from_omitted_generics), variance=variance, ) ) class_def.type_vars = v info = TypeInfo(SymbolTable(), class_def, module_name) if mro is None: mro = [] if name != "builtins.object": mro.append(self.oi) info.mro = [info] + mro if bases is None: if mro: # By default, assume that there is a single non-generic base. bases = [Instance(mro[0], [])] else: bases = [] info.bases = bases return info def def_alias_1(self, base: Instance) -> tuple[TypeAliasType, Type]: A = TypeAliasType(None, []) target = Instance( self.std_tuplei, [UnionType([base, A])] ) # A = Tuple[Union[base, A], ...] AN = TypeAlias(target, "__main__.A", -1, -1) A.alias = AN return A, target def def_alias_2(self, base: Instance) -> tuple[TypeAliasType, Type]: A = TypeAliasType(None, []) target = UnionType( [base, Instance(self.std_tuplei, [A])] ) # A = Union[base, Tuple[A, ...]] AN = TypeAlias(target, "__main__.A", -1, -1) A.alias = AN return A, target def non_rec_alias( self, target: Type, alias_tvars: list[TypeVarLikeType] | None = None, args: list[Type] | None = None, ) -> TypeAliasType: AN = TypeAlias(target, "__main__.A", -1, -1, alias_tvars=alias_tvars) if args is None: args = [] return TypeAliasType(AN, args) class InterfaceTypeFixture(TypeFixture): """Extension of TypeFixture that contains additional generic interface types.""" def __init__(self) -> None: super().__init__() # GF[T] self.gfi = self.make_type_info("GF", typevars=["T"], is_abstract=True) # M1 <: GF[A] self.m1i = self.make_type_info( "M1", is_abstract=True, mro=[self.gfi, self.oi], bases=[Instance(self.gfi, [self.a])] ) self.gfa = Instance(self.gfi, [self.a]) # GF[A] self.gfb = Instance(self.gfi, [self.b]) # GF[B] self.m1 = Instance(self.m1i, []) # M1
algorandfoundation/puya
src/puyapy/_vendor/mypy/test/typefixture.py
Python
NOASSERTION
15,817
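A minimal usage sketch for the fixture above, mirroring how mypy's own type-operation tests drive it; it assumes mypy.subtypes.is_subtype is importable and that the fixture's synthetic MROs (B under A, covariant G) behave as declared:

from mypy.nodes import COVARIANT
from mypy.subtypes import is_subtype
from mypy.test.typefixture import TypeFixture

fx = TypeFixture(COVARIANT)
assert is_subtype(fx.b, fx.a)    # B was built with mro [B, A, object]
assert is_subtype(fx.gb, fx.ga)  # G[B] <: G[A] under covariance
# fx.callable(a1, ..., r) builds (a1, ...) -> r; arguments are contravariant.
assert is_subtype(fx.callable(fx.a, fx.b), fx.callable(fx.b, fx.b))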
from __future__ import annotations

import re
from collections import defaultdict
from typing import Iterator

from mypy.test.data import DataDrivenTestCase, DataFileCollector, DataFileFix, parse_test_data


def update_testcase_output(
    testcase: DataDrivenTestCase, actual: list[str], *, incremental_step: int
) -> None:
    if testcase.xfail:
        return
    collector = testcase.parent
    assert isinstance(collector, DataFileCollector)
    for fix in _iter_fixes(testcase, actual, incremental_step=incremental_step):
        collector.enqueue_fix(fix)


def _iter_fixes(
    testcase: DataDrivenTestCase, actual: list[str], *, incremental_step: int
) -> Iterator[DataFileFix]:
    reports_by_line: dict[tuple[str, int], list[tuple[str, str]]] = defaultdict(list)
    for error_line in actual:
        comment_match = re.match(
            r"^(?P<filename>[^:]+):(?P<lineno>\d+): (?P<severity>error|note|warning): (?P<msg>.+)$",
            error_line,
        )
        if comment_match:
            filename = comment_match.group("filename")
            lineno = int(comment_match.group("lineno"))
            severity = comment_match.group("severity")
            msg = comment_match.group("msg")
            reports_by_line[filename, lineno].append((severity, msg))

    test_items = parse_test_data(testcase.data, testcase.name)

    # If we have [out] and/or [outN], we update just those sections.
    if any(re.match(r"^out\d*$", test_item.id) for test_item in test_items):
        for test_item in test_items:
            if (incremental_step < 2 and test_item.id == "out") or (
                incremental_step >= 2 and test_item.id == f"out{incremental_step}"
            ):
                yield DataFileFix(
                    lineno=testcase.line + test_item.line - 1,
                    end_lineno=testcase.line + test_item.end_line - 1,
                    lines=actual + [""] * test_item.trimmed_newlines,
                )
        return

    # Update assertion comments within the sections
    for test_item in test_items:
        if test_item.id == "case":
            source_lines = test_item.data
            file_path = "main"
        elif test_item.id == "file":
            source_lines = test_item.data
            file_path = f"tmp/{test_item.arg}"
        else:
            continue  # other sections we don't touch

        fix_lines = []
        for lineno, source_line in enumerate(source_lines, start=1):
            reports = reports_by_line.get((file_path, lineno))
            comment_match = re.search(r"(?P<indent>\s+)(?P<comment># [EWN]: .+)$", source_line)
            if comment_match:
                source_line = source_line[: comment_match.start("indent")]  # strip old comment
            if reports:
                indent = comment_match.group("indent") if comment_match else "  "
                # multiline comments are on the first line, and subsequent lines are
                # empty lines with a continuation backslash
                for j, (severity, msg) in enumerate(reports):
                    out_l = source_line if j == 0 else " " * len(source_line)
                    is_last = j == len(reports) - 1
                    severity_char = severity[0].upper()
                    continuation = "" if is_last else " \\"
                    fix_lines.append(f"{out_l}{indent}# {severity_char}: {msg}{continuation}")
            else:
                fix_lines.append(source_line)

        yield DataFileFix(
            lineno=testcase.line + test_item.line - 1,
            end_lineno=testcase.line + test_item.end_line - 1,
            lines=fix_lines + [""] * test_item.trimmed_newlines,
        )
algorandfoundation/puya
src/puyapy/_vendor/mypy/test/update_data.py
Python
NOASSERTION
3,676
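A small self-contained sketch of the regex bucketing step in _iter_fixes above: mypy output lines are grouped by (file, line) and later spliced back into the test source as "# E:" assertion comments. The sample error message is made up for illustration:

import re

pattern = re.compile(
    r"^(?P<filename>[^:]+):(?P<lineno>\d+): (?P<severity>error|note|warning): (?P<msg>.+)$"
)
# Hypothetical mypy output line for a test case file named "main"
line = 'main:2: error: Unsupported operand types ("int" and "str")'
m = pattern.match(line)
assert m is not None
key = (m.group("filename"), int(m.group("lineno")))
assert key == ("main", 2)
# The ("error", msg) report at main:2 is then rewritten as:  <source line>  # E: <msg>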
"""Visitor classes pulled out from different tests These are here because we don't currently support having interpreted classes subtype compiled ones but pytest grabs the python file even if the test was compiled. """ from __future__ import annotations from mypy.nodes import AssignmentStmt, CallExpr, Expression, IntExpr, NameExpr, Node, TypeVarExpr from mypy.traverser import TraverserVisitor from mypy.treetransform import TransformVisitor from mypy.types import Type # from testtypegen class SkippedNodeSearcher(TraverserVisitor): def __init__(self) -> None: self.nodes: set[Node] = set() self.ignore_file = False def visit_assignment_stmt(self, s: AssignmentStmt) -> None: if s.type or ignore_node(s.rvalue): for lvalue in s.lvalues: if isinstance(lvalue, NameExpr): self.nodes.add(lvalue) super().visit_assignment_stmt(s) def visit_name_expr(self, n: NameExpr) -> None: if self.ignore_file: self.nodes.add(n) super().visit_name_expr(n) def visit_int_expr(self, n: IntExpr) -> None: if self.ignore_file: self.nodes.add(n) super().visit_int_expr(n) def ignore_node(node: Expression) -> bool: """Return True if node is to be omitted from test case output.""" # We want to get rid of object() expressions in the typing module stub # and also TypeVar(...) expressions. Since detecting whether a node comes # from the typing module is not easy, we just to strip them all away. if isinstance(node, TypeVarExpr): return True if isinstance(node, NameExpr) and node.fullname == "builtins.object": return True if isinstance(node, NameExpr) and node.fullname == "builtins.None": return True if isinstance(node, CallExpr) and (ignore_node(node.callee) or node.analyzed): return True return False # from testtransform class TypeAssertTransformVisitor(TransformVisitor): def type(self, type: Type) -> Type: assert type is not None return type
algorandfoundation/puya
src/puyapy/_vendor/mypy/test/visitors.py
Python
NOASSERTION
2,089
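A hedged sketch of driving SkippedNodeSearcher by hand over a synthetic AST node; it assumes the AssignmentStmt(lvalues, rvalue, type=...) constructor shape of current mypy:

from mypy.nodes import AssignmentStmt, IntExpr, NameExpr
from mypy.test.visitors import SkippedNodeSearcher
from mypy.types import AnyType, TypeOfAny

lvalue = NameExpr("x")
# An explicitly annotated assignment: its lvalue should be marked as skipped.
stmt = AssignmentStmt([lvalue], IntExpr(1), type=AnyType(TypeOfAny.explicit))
searcher = SkippedNodeSearcher()
stmt.accept(searcher)
assert lvalue in searcher.nodes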
"""Generic node traverser visitor""" from __future__ import annotations from mypy_extensions import mypyc_attr, trait from mypy.nodes import ( REVEAL_TYPE, AssertStmt, AssertTypeExpr, AssignmentExpr, AssignmentStmt, AwaitExpr, Block, BreakStmt, BytesExpr, CallExpr, CastExpr, ClassDef, ComparisonExpr, ComplexExpr, ConditionalExpr, ContinueStmt, Decorator, DelStmt, DictExpr, DictionaryComprehension, EllipsisExpr, EnumCallExpr, Expression, ExpressionStmt, FloatExpr, ForStmt, FuncBase, FuncDef, FuncItem, GeneratorExpr, GlobalDecl, IfStmt, Import, ImportAll, ImportFrom, IndexExpr, IntExpr, LambdaExpr, ListComprehension, ListExpr, MatchStmt, MemberExpr, MypyFile, NamedTupleExpr, NameExpr, NewTypeExpr, Node, NonlocalDecl, OperatorAssignmentStmt, OpExpr, OverloadedFuncDef, ParamSpecExpr, PassStmt, RaiseStmt, ReturnStmt, RevealExpr, SetComprehension, SetExpr, SliceExpr, StarExpr, StrExpr, SuperExpr, TryStmt, TupleExpr, TypeAlias, TypeAliasExpr, TypeAliasStmt, TypeApplication, TypedDictExpr, TypeVarExpr, TypeVarTupleExpr, UnaryExpr, WhileStmt, WithStmt, YieldExpr, YieldFromExpr, ) from mypy.patterns import ( AsPattern, ClassPattern, MappingPattern, OrPattern, SequencePattern, SingletonPattern, StarredPattern, ValuePattern, ) from mypy.visitor import NodeVisitor @trait @mypyc_attr(allow_interpreted_subclasses=True) class TraverserVisitor(NodeVisitor[None]): """A parse tree visitor that traverses the parse tree during visiting. It does not perform any actions outside the traversal. Subclasses should override visit methods to perform actions during traversal. Calling the superclass method allows reusing the traversal implementation. """ def __init__(self) -> None: pass # Visit methods def visit_mypy_file(self, o: MypyFile) -> None: for d in o.defs: d.accept(self) def visit_block(self, block: Block) -> None: for s in block.body: s.accept(self) def visit_func(self, o: FuncItem) -> None: if o.arguments is not None: for arg in o.arguments: init = arg.initializer if init is not None: init.accept(self) for arg in o.arguments: self.visit_var(arg.variable) o.body.accept(self) def visit_func_def(self, o: FuncDef) -> None: self.visit_func(o) def visit_overloaded_func_def(self, o: OverloadedFuncDef) -> None: for item in o.items: item.accept(self) if o.impl: o.impl.accept(self) def visit_class_def(self, o: ClassDef) -> None: for d in o.decorators: d.accept(self) for base in o.base_type_exprs: base.accept(self) if o.metaclass: o.metaclass.accept(self) for v in o.keywords.values(): v.accept(self) o.defs.accept(self) if o.analyzed: o.analyzed.accept(self) def visit_decorator(self, o: Decorator) -> None: o.func.accept(self) o.var.accept(self) for decorator in o.decorators: decorator.accept(self) def visit_expression_stmt(self, o: ExpressionStmt) -> None: o.expr.accept(self) def visit_assignment_stmt(self, o: AssignmentStmt) -> None: o.rvalue.accept(self) for l in o.lvalues: l.accept(self) def visit_operator_assignment_stmt(self, o: OperatorAssignmentStmt) -> None: o.rvalue.accept(self) o.lvalue.accept(self) def visit_while_stmt(self, o: WhileStmt) -> None: o.expr.accept(self) o.body.accept(self) if o.else_body: o.else_body.accept(self) def visit_for_stmt(self, o: ForStmt) -> None: o.index.accept(self) o.expr.accept(self) o.body.accept(self) if o.else_body: o.else_body.accept(self) def visit_return_stmt(self, o: ReturnStmt) -> None: if o.expr is not None: o.expr.accept(self) def visit_assert_stmt(self, o: AssertStmt) -> None: if o.expr is not None: o.expr.accept(self) if o.msg is not None: o.msg.accept(self) def 
visit_del_stmt(self, o: DelStmt) -> None: if o.expr is not None: o.expr.accept(self) def visit_if_stmt(self, o: IfStmt) -> None: for e in o.expr: e.accept(self) for b in o.body: b.accept(self) if o.else_body: o.else_body.accept(self) def visit_raise_stmt(self, o: RaiseStmt) -> None: if o.expr is not None: o.expr.accept(self) if o.from_expr is not None: o.from_expr.accept(self) def visit_try_stmt(self, o: TryStmt) -> None: o.body.accept(self) for i in range(len(o.types)): tp = o.types[i] if tp is not None: tp.accept(self) o.handlers[i].accept(self) for v in o.vars: if v is not None: v.accept(self) if o.else_body is not None: o.else_body.accept(self) if o.finally_body is not None: o.finally_body.accept(self) def visit_with_stmt(self, o: WithStmt) -> None: for i in range(len(o.expr)): o.expr[i].accept(self) targ = o.target[i] if targ is not None: targ.accept(self) o.body.accept(self) def visit_match_stmt(self, o: MatchStmt) -> None: o.subject.accept(self) for i in range(len(o.patterns)): o.patterns[i].accept(self) guard = o.guards[i] if guard is not None: guard.accept(self) o.bodies[i].accept(self) def visit_type_alias_stmt(self, o: TypeAliasStmt) -> None: o.name.accept(self) o.value.accept(self) def visit_member_expr(self, o: MemberExpr) -> None: o.expr.accept(self) def visit_yield_from_expr(self, o: YieldFromExpr) -> None: o.expr.accept(self) def visit_yield_expr(self, o: YieldExpr) -> None: if o.expr: o.expr.accept(self) def visit_call_expr(self, o: CallExpr) -> None: o.callee.accept(self) for a in o.args: a.accept(self) if o.analyzed: o.analyzed.accept(self) def visit_op_expr(self, o: OpExpr) -> None: o.left.accept(self) o.right.accept(self) if o.analyzed is not None: o.analyzed.accept(self) def visit_comparison_expr(self, o: ComparisonExpr) -> None: for operand in o.operands: operand.accept(self) def visit_slice_expr(self, o: SliceExpr) -> None: if o.begin_index is not None: o.begin_index.accept(self) if o.end_index is not None: o.end_index.accept(self) if o.stride is not None: o.stride.accept(self) def visit_cast_expr(self, o: CastExpr) -> None: o.expr.accept(self) def visit_assert_type_expr(self, o: AssertTypeExpr) -> None: o.expr.accept(self) def visit_reveal_expr(self, o: RevealExpr) -> None: if o.kind == REVEAL_TYPE: assert o.expr is not None o.expr.accept(self) else: # RevealLocalsExpr doesn't have an inner expression pass def visit_assignment_expr(self, o: AssignmentExpr) -> None: o.target.accept(self) o.value.accept(self) def visit_unary_expr(self, o: UnaryExpr) -> None: o.expr.accept(self) def visit_list_expr(self, o: ListExpr) -> None: for item in o.items: item.accept(self) def visit_tuple_expr(self, o: TupleExpr) -> None: for item in o.items: item.accept(self) def visit_dict_expr(self, o: DictExpr) -> None: for k, v in o.items: if k is not None: k.accept(self) v.accept(self) def visit_set_expr(self, o: SetExpr) -> None: for item in o.items: item.accept(self) def visit_index_expr(self, o: IndexExpr) -> None: o.base.accept(self) o.index.accept(self) if o.analyzed: o.analyzed.accept(self) def visit_generator_expr(self, o: GeneratorExpr) -> None: for index, sequence, conditions in zip(o.indices, o.sequences, o.condlists): sequence.accept(self) index.accept(self) for cond in conditions: cond.accept(self) o.left_expr.accept(self) def visit_dictionary_comprehension(self, o: DictionaryComprehension) -> None: for index, sequence, conditions in zip(o.indices, o.sequences, o.condlists): sequence.accept(self) index.accept(self) for cond in conditions: cond.accept(self) o.key.accept(self) 
o.value.accept(self) def visit_list_comprehension(self, o: ListComprehension) -> None: o.generator.accept(self) def visit_set_comprehension(self, o: SetComprehension) -> None: o.generator.accept(self) def visit_conditional_expr(self, o: ConditionalExpr) -> None: o.cond.accept(self) o.if_expr.accept(self) o.else_expr.accept(self) def visit_type_application(self, o: TypeApplication) -> None: o.expr.accept(self) def visit_lambda_expr(self, o: LambdaExpr) -> None: self.visit_func(o) def visit_star_expr(self, o: StarExpr) -> None: o.expr.accept(self) def visit_await_expr(self, o: AwaitExpr) -> None: o.expr.accept(self) def visit_super_expr(self, o: SuperExpr) -> None: o.call.accept(self) def visit_as_pattern(self, o: AsPattern) -> None: if o.pattern is not None: o.pattern.accept(self) if o.name is not None: o.name.accept(self) def visit_or_pattern(self, o: OrPattern) -> None: for p in o.patterns: p.accept(self) def visit_value_pattern(self, o: ValuePattern) -> None: o.expr.accept(self) def visit_sequence_pattern(self, o: SequencePattern) -> None: for p in o.patterns: p.accept(self) def visit_starred_pattern(self, o: StarredPattern) -> None: if o.capture is not None: o.capture.accept(self) def visit_mapping_pattern(self, o: MappingPattern) -> None: for key in o.keys: key.accept(self) for value in o.values: value.accept(self) if o.rest is not None: o.rest.accept(self) def visit_class_pattern(self, o: ClassPattern) -> None: o.class_ref.accept(self) for p in o.positionals: p.accept(self) for v in o.keyword_values: v.accept(self) def visit_import(self, o: Import) -> None: for a in o.assignments: a.accept(self) def visit_import_from(self, o: ImportFrom) -> None: for a in o.assignments: a.accept(self) class ExtendedTraverserVisitor(TraverserVisitor): """This is a more flexible traverser. In addition to the base traverser it: * has visit_ methods for leaf nodes * has common method that is called for all nodes * allows to skip recursing into a node Note that this traverser still doesn't visit some internal mypy constructs like _promote expression and Var. """ def visit(self, o: Node) -> bool: # If returns True, will continue to nested nodes. 
return True def visit_mypy_file(self, o: MypyFile) -> None: if not self.visit(o): return super().visit_mypy_file(o) # Module structure def visit_import(self, o: Import) -> None: if not self.visit(o): return super().visit_import(o) def visit_import_from(self, o: ImportFrom) -> None: if not self.visit(o): return super().visit_import_from(o) def visit_import_all(self, o: ImportAll) -> None: if not self.visit(o): return super().visit_import_all(o) # Definitions def visit_func_def(self, o: FuncDef) -> None: if not self.visit(o): return super().visit_func_def(o) def visit_overloaded_func_def(self, o: OverloadedFuncDef) -> None: if not self.visit(o): return super().visit_overloaded_func_def(o) def visit_class_def(self, o: ClassDef) -> None: if not self.visit(o): return super().visit_class_def(o) def visit_global_decl(self, o: GlobalDecl) -> None: if not self.visit(o): return super().visit_global_decl(o) def visit_nonlocal_decl(self, o: NonlocalDecl) -> None: if not self.visit(o): return super().visit_nonlocal_decl(o) def visit_decorator(self, o: Decorator) -> None: if not self.visit(o): return super().visit_decorator(o) def visit_type_alias(self, o: TypeAlias) -> None: if not self.visit(o): return super().visit_type_alias(o) # Statements def visit_block(self, block: Block) -> None: if not self.visit(block): return super().visit_block(block) def visit_expression_stmt(self, o: ExpressionStmt) -> None: if not self.visit(o): return super().visit_expression_stmt(o) def visit_assignment_stmt(self, o: AssignmentStmt) -> None: if not self.visit(o): return super().visit_assignment_stmt(o) def visit_operator_assignment_stmt(self, o: OperatorAssignmentStmt) -> None: if not self.visit(o): return super().visit_operator_assignment_stmt(o) def visit_while_stmt(self, o: WhileStmt) -> None: if not self.visit(o): return super().visit_while_stmt(o) def visit_for_stmt(self, o: ForStmt) -> None: if not self.visit(o): return super().visit_for_stmt(o) def visit_return_stmt(self, o: ReturnStmt) -> None: if not self.visit(o): return super().visit_return_stmt(o) def visit_assert_stmt(self, o: AssertStmt) -> None: if not self.visit(o): return super().visit_assert_stmt(o) def visit_del_stmt(self, o: DelStmt) -> None: if not self.visit(o): return super().visit_del_stmt(o) def visit_if_stmt(self, o: IfStmt) -> None: if not self.visit(o): return super().visit_if_stmt(o) def visit_break_stmt(self, o: BreakStmt) -> None: if not self.visit(o): return super().visit_break_stmt(o) def visit_continue_stmt(self, o: ContinueStmt) -> None: if not self.visit(o): return super().visit_continue_stmt(o) def visit_pass_stmt(self, o: PassStmt) -> None: if not self.visit(o): return super().visit_pass_stmt(o) def visit_raise_stmt(self, o: RaiseStmt) -> None: if not self.visit(o): return super().visit_raise_stmt(o) def visit_try_stmt(self, o: TryStmt) -> None: if not self.visit(o): return super().visit_try_stmt(o) def visit_with_stmt(self, o: WithStmt) -> None: if not self.visit(o): return super().visit_with_stmt(o) def visit_match_stmt(self, o: MatchStmt) -> None: if not self.visit(o): return super().visit_match_stmt(o) # Expressions (default no-op implementation) def visit_int_expr(self, o: IntExpr) -> None: if not self.visit(o): return super().visit_int_expr(o) def visit_str_expr(self, o: StrExpr) -> None: if not self.visit(o): return super().visit_str_expr(o) def visit_bytes_expr(self, o: BytesExpr) -> None: if not self.visit(o): return super().visit_bytes_expr(o) def visit_float_expr(self, o: FloatExpr) -> None: if not self.visit(o): return 
super().visit_float_expr(o) def visit_complex_expr(self, o: ComplexExpr) -> None: if not self.visit(o): return super().visit_complex_expr(o) def visit_ellipsis(self, o: EllipsisExpr) -> None: if not self.visit(o): return super().visit_ellipsis(o) def visit_star_expr(self, o: StarExpr) -> None: if not self.visit(o): return super().visit_star_expr(o) def visit_name_expr(self, o: NameExpr) -> None: if not self.visit(o): return super().visit_name_expr(o) def visit_member_expr(self, o: MemberExpr) -> None: if not self.visit(o): return super().visit_member_expr(o) def visit_yield_from_expr(self, o: YieldFromExpr) -> None: if not self.visit(o): return super().visit_yield_from_expr(o) def visit_yield_expr(self, o: YieldExpr) -> None: if not self.visit(o): return super().visit_yield_expr(o) def visit_call_expr(self, o: CallExpr) -> None: if not self.visit(o): return super().visit_call_expr(o) def visit_op_expr(self, o: OpExpr) -> None: if not self.visit(o): return super().visit_op_expr(o) def visit_comparison_expr(self, o: ComparisonExpr) -> None: if not self.visit(o): return super().visit_comparison_expr(o) def visit_cast_expr(self, o: CastExpr) -> None: if not self.visit(o): return super().visit_cast_expr(o) def visit_assert_type_expr(self, o: AssertTypeExpr) -> None: if not self.visit(o): return super().visit_assert_type_expr(o) def visit_reveal_expr(self, o: RevealExpr) -> None: if not self.visit(o): return super().visit_reveal_expr(o) def visit_super_expr(self, o: SuperExpr) -> None: if not self.visit(o): return super().visit_super_expr(o) def visit_assignment_expr(self, o: AssignmentExpr) -> None: if not self.visit(o): return super().visit_assignment_expr(o) def visit_unary_expr(self, o: UnaryExpr) -> None: if not self.visit(o): return super().visit_unary_expr(o) def visit_list_expr(self, o: ListExpr) -> None: if not self.visit(o): return super().visit_list_expr(o) def visit_dict_expr(self, o: DictExpr) -> None: if not self.visit(o): return super().visit_dict_expr(o) def visit_tuple_expr(self, o: TupleExpr) -> None: if not self.visit(o): return super().visit_tuple_expr(o) def visit_set_expr(self, o: SetExpr) -> None: if not self.visit(o): return super().visit_set_expr(o) def visit_index_expr(self, o: IndexExpr) -> None: if not self.visit(o): return super().visit_index_expr(o) def visit_type_application(self, o: TypeApplication) -> None: if not self.visit(o): return super().visit_type_application(o) def visit_lambda_expr(self, o: LambdaExpr) -> None: if not self.visit(o): return super().visit_lambda_expr(o) def visit_list_comprehension(self, o: ListComprehension) -> None: if not self.visit(o): return super().visit_list_comprehension(o) def visit_set_comprehension(self, o: SetComprehension) -> None: if not self.visit(o): return super().visit_set_comprehension(o) def visit_dictionary_comprehension(self, o: DictionaryComprehension) -> None: if not self.visit(o): return super().visit_dictionary_comprehension(o) def visit_generator_expr(self, o: GeneratorExpr) -> None: if not self.visit(o): return super().visit_generator_expr(o) def visit_slice_expr(self, o: SliceExpr) -> None: if not self.visit(o): return super().visit_slice_expr(o) def visit_conditional_expr(self, o: ConditionalExpr) -> None: if not self.visit(o): return super().visit_conditional_expr(o) def visit_type_var_expr(self, o: TypeVarExpr) -> None: if not self.visit(o): return super().visit_type_var_expr(o) def visit_paramspec_expr(self, o: ParamSpecExpr) -> None: if not self.visit(o): return super().visit_paramspec_expr(o) def 
visit_type_var_tuple_expr(self, o: TypeVarTupleExpr) -> None: if not self.visit(o): return super().visit_type_var_tuple_expr(o) def visit_type_alias_expr(self, o: TypeAliasExpr) -> None: if not self.visit(o): return super().visit_type_alias_expr(o) def visit_namedtuple_expr(self, o: NamedTupleExpr) -> None: if not self.visit(o): return super().visit_namedtuple_expr(o) def visit_enum_call_expr(self, o: EnumCallExpr) -> None: if not self.visit(o): return super().visit_enum_call_expr(o) def visit_typeddict_expr(self, o: TypedDictExpr) -> None: if not self.visit(o): return super().visit_typeddict_expr(o) def visit_newtype_expr(self, o: NewTypeExpr) -> None: if not self.visit(o): return super().visit_newtype_expr(o) def visit_await_expr(self, o: AwaitExpr) -> None: if not self.visit(o): return super().visit_await_expr(o) # Patterns def visit_as_pattern(self, o: AsPattern) -> None: if not self.visit(o): return super().visit_as_pattern(o) def visit_or_pattern(self, o: OrPattern) -> None: if not self.visit(o): return super().visit_or_pattern(o) def visit_value_pattern(self, o: ValuePattern) -> None: if not self.visit(o): return super().visit_value_pattern(o) def visit_singleton_pattern(self, o: SingletonPattern) -> None: if not self.visit(o): return super().visit_singleton_pattern(o) def visit_sequence_pattern(self, o: SequencePattern) -> None: if not self.visit(o): return super().visit_sequence_pattern(o) def visit_starred_pattern(self, o: StarredPattern) -> None: if not self.visit(o): return super().visit_starred_pattern(o) def visit_mapping_pattern(self, o: MappingPattern) -> None: if not self.visit(o): return super().visit_mapping_pattern(o) def visit_class_pattern(self, o: ClassPattern) -> None: if not self.visit(o): return super().visit_class_pattern(o) class ReturnSeeker(TraverserVisitor): def __init__(self) -> None: self.found = False def visit_return_stmt(self, o: ReturnStmt) -> None: if o.expr is None or isinstance(o.expr, NameExpr) and o.expr.name == "None": return self.found = True def has_return_statement(fdef: FuncBase) -> bool: """Find if a function has a non-trivial return statement. Plain 'return' and 'return None' don't count. 
""" seeker = ReturnSeeker() fdef.accept(seeker) return seeker.found class FuncCollectorBase(TraverserVisitor): def __init__(self) -> None: self.inside_func = False def visit_func_def(self, defn: FuncDef) -> None: if not self.inside_func: self.inside_func = True super().visit_func_def(defn) self.inside_func = False class YieldSeeker(FuncCollectorBase): def __init__(self) -> None: super().__init__() self.found = False def visit_yield_expr(self, o: YieldExpr) -> None: self.found = True def has_yield_expression(fdef: FuncBase) -> bool: seeker = YieldSeeker() fdef.accept(seeker) return seeker.found class YieldFromSeeker(FuncCollectorBase): def __init__(self) -> None: super().__init__() self.found = False def visit_yield_from_expr(self, o: YieldFromExpr) -> None: self.found = True def has_yield_from_expression(fdef: FuncBase) -> bool: seeker = YieldFromSeeker() fdef.accept(seeker) return seeker.found class AwaitSeeker(TraverserVisitor): def __init__(self) -> None: super().__init__() self.found = False def visit_await_expr(self, o: AwaitExpr) -> None: self.found = True def has_await_expression(expr: Expression) -> bool: seeker = AwaitSeeker() expr.accept(seeker) return seeker.found class ReturnCollector(FuncCollectorBase): def __init__(self) -> None: super().__init__() self.return_statements: list[ReturnStmt] = [] def visit_return_stmt(self, stmt: ReturnStmt) -> None: self.return_statements.append(stmt) def all_return_statements(node: Node) -> list[ReturnStmt]: v = ReturnCollector() node.accept(v) return v.return_statements class YieldCollector(FuncCollectorBase): def __init__(self) -> None: super().__init__() self.in_assignment = False self.yield_expressions: list[tuple[YieldExpr, bool]] = [] def visit_assignment_stmt(self, stmt: AssignmentStmt) -> None: self.in_assignment = True super().visit_assignment_stmt(stmt) self.in_assignment = False def visit_yield_expr(self, expr: YieldExpr) -> None: self.yield_expressions.append((expr, self.in_assignment)) def all_yield_expressions(node: Node) -> list[tuple[YieldExpr, bool]]: v = YieldCollector() node.accept(v) return v.yield_expressions class YieldFromCollector(FuncCollectorBase): def __init__(self) -> None: super().__init__() self.in_assignment = False self.yield_from_expressions: list[tuple[YieldFromExpr, bool]] = [] def visit_assignment_stmt(self, stmt: AssignmentStmt) -> None: self.in_assignment = True super().visit_assignment_stmt(stmt) self.in_assignment = False def visit_yield_from_expr(self, expr: YieldFromExpr) -> None: self.yield_from_expressions.append((expr, self.in_assignment)) def all_yield_from_expressions(node: Node) -> list[tuple[YieldFromExpr, bool]]: v = YieldFromCollector() node.accept(v) return v.yield_from_expressions
algorandfoundation/puya
src/puyapy/_vendor/mypy/traverser.py
Python
NOASSERTION
27,003
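A brief sketch of the pruning hook that ExtendedTraverserVisitor adds above: returning False from visit() stops recursion into that node. The node tree here is built by hand for illustration:

from mypy.nodes import IntExpr, ListExpr, NameExpr, Node
from mypy.traverser import ExtendedTraverserVisitor

class PruningCounter(ExtendedTraverserVisitor):
    def __init__(self) -> None:
        super().__init__()
        self.count = 0

    def visit(self, o: Node) -> bool:
        self.count += 1
        # Skip recursing into list literals.
        return not isinstance(o, ListExpr)

expr = ListExpr([NameExpr("a"), ListExpr([IntExpr(1)])])
counter = PruningCounter()
expr.accept(counter)
assert counter.count == 1  # only the outer list was visited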
"""Base visitor that implements an identity AST transform. Subclass TransformVisitor to perform non-trivial transformations. """ from __future__ import annotations from typing import Iterable, Optional, cast from mypy.nodes import ( GDEF, REVEAL_TYPE, Argument, AssertStmt, AssertTypeExpr, AssignmentExpr, AssignmentStmt, AwaitExpr, Block, BreakStmt, BytesExpr, CallExpr, CastExpr, ClassDef, ComparisonExpr, ComplexExpr, ConditionalExpr, ContinueStmt, Decorator, DelStmt, DictExpr, DictionaryComprehension, EllipsisExpr, EnumCallExpr, Expression, ExpressionStmt, FloatExpr, ForStmt, FuncDef, FuncItem, GeneratorExpr, GlobalDecl, IfStmt, Import, ImportAll, ImportFrom, IndexExpr, IntExpr, LambdaExpr, ListComprehension, ListExpr, MatchStmt, MemberExpr, MypyFile, NamedTupleExpr, NameExpr, NewTypeExpr, Node, NonlocalDecl, OperatorAssignmentStmt, OpExpr, OverloadedFuncDef, OverloadPart, ParamSpecExpr, PassStmt, PromoteExpr, RaiseStmt, RefExpr, ReturnStmt, RevealExpr, SetComprehension, SetExpr, SliceExpr, StarExpr, Statement, StrExpr, SuperExpr, SymbolTable, TempNode, TryStmt, TupleExpr, TypeAliasExpr, TypeApplication, TypedDictExpr, TypeVarExpr, TypeVarTupleExpr, UnaryExpr, Var, WhileStmt, WithStmt, YieldExpr, YieldFromExpr, ) from mypy.patterns import ( AsPattern, ClassPattern, MappingPattern, OrPattern, Pattern, SequencePattern, SingletonPattern, StarredPattern, ValuePattern, ) from mypy.traverser import TraverserVisitor from mypy.types import FunctionLike, ProperType, Type from mypy.util import replace_object_state from mypy.visitor import NodeVisitor class TransformVisitor(NodeVisitor[Node]): """Transform a semantically analyzed AST (or subtree) to an identical copy. Use the node() method to transform an AST node. Subclass to perform a non-identity transform. Notes: * This can only be used to transform functions or classes, not top-level statements, and/or modules as a whole. * Do not duplicate TypeInfo nodes. This would generally not be desirable. * Only update some name binding cross-references, but only those that refer to Var, Decorator or FuncDef nodes, not those targeting ClassDef or TypeInfo nodes. * Types are not transformed, but you can override type() to also perform type transformation. TODO nested classes and functions have not been tested well enough """ def __init__(self) -> None: # To simplify testing, set this flag to True if you want to transform # all statements in a file (this is prohibited in normal mode). self.test_only = False # There may be multiple references to a Var node. Keep track of # Var translations using a dictionary. self.var_map: dict[Var, Var] = {} # These are uninitialized placeholder nodes used temporarily for nested # functions while we are transforming a top-level function. This maps an # untransformed node to a placeholder (which will later become the # transformed node). self.func_placeholder_map: dict[FuncDef, FuncDef] = {} def visit_mypy_file(self, node: MypyFile) -> MypyFile: assert self.test_only, "This visitor should not be used for whole files." # NOTE: The 'names' and 'imports' instance variables will be empty! 
ignored_lines = {line: codes.copy() for line, codes in node.ignored_lines.items()} new = MypyFile(self.statements(node.defs), [], node.is_bom, ignored_lines=ignored_lines) new._fullname = node._fullname new.path = node.path new.names = SymbolTable() return new def visit_import(self, node: Import) -> Import: return Import(node.ids.copy()) def visit_import_from(self, node: ImportFrom) -> ImportFrom: return ImportFrom(node.id, node.relative, node.names.copy()) def visit_import_all(self, node: ImportAll) -> ImportAll: return ImportAll(node.id, node.relative) def copy_argument(self, argument: Argument) -> Argument: arg = Argument( self.visit_var(argument.variable), argument.type_annotation, argument.initializer, argument.kind, ) # Refresh lines of the inner things arg.set_line(argument) return arg def visit_func_def(self, node: FuncDef) -> FuncDef: # Note that a FuncDef must be transformed to a FuncDef. # These contortions are needed to handle the case of recursive # references inside the function being transformed. # Set up placeholder nodes for references within this function # to other functions defined inside it. # Don't create an entry for this function itself though, # since we want self-references to point to the original # function if this is the top-level node we are transforming. init = FuncMapInitializer(self) for stmt in node.body.body: stmt.accept(init) new = FuncDef( node.name, [self.copy_argument(arg) for arg in node.arguments], self.block(node.body), cast(Optional[FunctionLike], self.optional_type(node.type)), ) self.copy_function_attributes(new, node) new._fullname = node._fullname new.is_decorated = node.is_decorated new.is_conditional = node.is_conditional new.abstract_status = node.abstract_status new.is_static = node.is_static new.is_class = node.is_class new.is_property = node.is_property new.is_final = node.is_final new.original_def = node.original_def if node in self.func_placeholder_map: # There is a placeholder definition for this function. Replace # the attributes of the placeholder with those form the transformed # function. We know that the classes will be identical (otherwise # this wouldn't work). 
result = self.func_placeholder_map[node] replace_object_state(result, new) return result else: return new def visit_lambda_expr(self, node: LambdaExpr) -> LambdaExpr: new = LambdaExpr( [self.copy_argument(arg) for arg in node.arguments], self.block(node.body), cast(Optional[FunctionLike], self.optional_type(node.type)), ) self.copy_function_attributes(new, node) return new def copy_function_attributes(self, new: FuncItem, original: FuncItem) -> None: new.info = original.info new.min_args = original.min_args new.max_pos = original.max_pos new.is_overload = original.is_overload new.is_generator = original.is_generator new.is_coroutine = original.is_coroutine new.is_async_generator = original.is_async_generator new.is_awaitable_coroutine = original.is_awaitable_coroutine new.line = original.line def visit_overloaded_func_def(self, node: OverloadedFuncDef) -> OverloadedFuncDef: items = [cast(OverloadPart, item.accept(self)) for item in node.items] for newitem, olditem in zip(items, node.items): newitem.line = olditem.line new = OverloadedFuncDef(items) new._fullname = node._fullname new_type = self.optional_type(node.type) assert isinstance(new_type, ProperType) new.type = new_type new.info = node.info new.is_static = node.is_static new.is_class = node.is_class new.is_property = node.is_property new.is_final = node.is_final if node.impl: new.impl = cast(OverloadPart, node.impl.accept(self)) return new def visit_class_def(self, node: ClassDef) -> ClassDef: new = ClassDef( node.name, self.block(node.defs), node.type_vars, self.expressions(node.base_type_exprs), self.optional_expr(node.metaclass), ) new.fullname = node.fullname new.info = node.info new.decorators = [self.expr(decorator) for decorator in node.decorators] return new def visit_global_decl(self, node: GlobalDecl) -> GlobalDecl: return GlobalDecl(node.names.copy()) def visit_nonlocal_decl(self, node: NonlocalDecl) -> NonlocalDecl: return NonlocalDecl(node.names.copy()) def visit_block(self, node: Block) -> Block: return Block(self.statements(node.body)) def visit_decorator(self, node: Decorator) -> Decorator: # Note that a Decorator must be transformed to a Decorator. func = self.visit_func_def(node.func) func.line = node.func.line new = Decorator(func, self.expressions(node.decorators), self.visit_var(node.var)) new.is_overload = node.is_overload return new def visit_var(self, node: Var) -> Var: # Note that a Var must be transformed to a Var. 
if node in self.var_map: return self.var_map[node] new = Var(node.name, self.optional_type(node.type)) new.line = node.line new._fullname = node._fullname new.info = node.info new.is_self = node.is_self new.is_ready = node.is_ready new.is_initialized_in_class = node.is_initialized_in_class new.is_staticmethod = node.is_staticmethod new.is_classmethod = node.is_classmethod new.is_property = node.is_property new.is_final = node.is_final new.final_value = node.final_value new.final_unset_in_class = node.final_unset_in_class new.final_set_in_init = node.final_set_in_init new.set_line(node) self.var_map[node] = new return new def visit_expression_stmt(self, node: ExpressionStmt) -> ExpressionStmt: return ExpressionStmt(self.expr(node.expr)) def visit_assignment_stmt(self, node: AssignmentStmt) -> AssignmentStmt: return self.duplicate_assignment(node) def duplicate_assignment(self, node: AssignmentStmt) -> AssignmentStmt: new = AssignmentStmt( self.expressions(node.lvalues), self.expr(node.rvalue), self.optional_type(node.unanalyzed_type), ) new.line = node.line new.is_final_def = node.is_final_def new.type = self.optional_type(node.type) return new def visit_operator_assignment_stmt( self, node: OperatorAssignmentStmt ) -> OperatorAssignmentStmt: return OperatorAssignmentStmt(node.op, self.expr(node.lvalue), self.expr(node.rvalue)) def visit_while_stmt(self, node: WhileStmt) -> WhileStmt: return WhileStmt( self.expr(node.expr), self.block(node.body), self.optional_block(node.else_body) ) def visit_for_stmt(self, node: ForStmt) -> ForStmt: new = ForStmt( self.expr(node.index), self.expr(node.expr), self.block(node.body), self.optional_block(node.else_body), self.optional_type(node.unanalyzed_index_type), ) new.is_async = node.is_async new.index_type = self.optional_type(node.index_type) return new def visit_return_stmt(self, node: ReturnStmt) -> ReturnStmt: return ReturnStmt(self.optional_expr(node.expr)) def visit_assert_stmt(self, node: AssertStmt) -> AssertStmt: return AssertStmt(self.expr(node.expr), self.optional_expr(node.msg)) def visit_del_stmt(self, node: DelStmt) -> DelStmt: return DelStmt(self.expr(node.expr)) def visit_if_stmt(self, node: IfStmt) -> IfStmt: return IfStmt( self.expressions(node.expr), self.blocks(node.body), self.optional_block(node.else_body), ) def visit_break_stmt(self, node: BreakStmt) -> BreakStmt: return BreakStmt() def visit_continue_stmt(self, node: ContinueStmt) -> ContinueStmt: return ContinueStmt() def visit_pass_stmt(self, node: PassStmt) -> PassStmt: return PassStmt() def visit_raise_stmt(self, node: RaiseStmt) -> RaiseStmt: return RaiseStmt(self.optional_expr(node.expr), self.optional_expr(node.from_expr)) def visit_try_stmt(self, node: TryStmt) -> TryStmt: new = TryStmt( self.block(node.body), self.optional_names(node.vars), self.optional_expressions(node.types), self.blocks(node.handlers), self.optional_block(node.else_body), self.optional_block(node.finally_body), ) new.is_star = node.is_star return new def visit_with_stmt(self, node: WithStmt) -> WithStmt: new = WithStmt( self.expressions(node.expr), self.optional_expressions(node.target), self.block(node.body), self.optional_type(node.unanalyzed_type), ) new.is_async = node.is_async new.analyzed_types = [self.type(typ) for typ in node.analyzed_types] return new def visit_as_pattern(self, p: AsPattern) -> AsPattern: return AsPattern( pattern=self.pattern(p.pattern) if p.pattern is not None else None, name=self.duplicate_name(p.name) if p.name is not None else None, ) def visit_or_pattern(self, p: 
OrPattern) -> OrPattern: return OrPattern([self.pattern(pat) for pat in p.patterns]) def visit_value_pattern(self, p: ValuePattern) -> ValuePattern: return ValuePattern(self.expr(p.expr)) def visit_singleton_pattern(self, p: SingletonPattern) -> SingletonPattern: return SingletonPattern(p.value) def visit_sequence_pattern(self, p: SequencePattern) -> SequencePattern: return SequencePattern([self.pattern(pat) for pat in p.patterns]) def visit_starred_pattern(self, p: StarredPattern) -> StarredPattern: return StarredPattern(self.duplicate_name(p.capture) if p.capture is not None else None) def visit_mapping_pattern(self, p: MappingPattern) -> MappingPattern: return MappingPattern( keys=[self.expr(expr) for expr in p.keys], values=[self.pattern(pat) for pat in p.values], rest=self.duplicate_name(p.rest) if p.rest is not None else None, ) def visit_class_pattern(self, p: ClassPattern) -> ClassPattern: class_ref = p.class_ref.accept(self) assert isinstance(class_ref, RefExpr) return ClassPattern( class_ref=class_ref, positionals=[self.pattern(pat) for pat in p.positionals], keyword_keys=list(p.keyword_keys), keyword_values=[self.pattern(pat) for pat in p.keyword_values], ) def visit_match_stmt(self, o: MatchStmt) -> MatchStmt: return MatchStmt( subject=self.expr(o.subject), patterns=[self.pattern(p) for p in o.patterns], guards=self.optional_expressions(o.guards), bodies=self.blocks(o.bodies), ) def visit_star_expr(self, node: StarExpr) -> StarExpr: return StarExpr(node.expr) def visit_int_expr(self, node: IntExpr) -> IntExpr: return IntExpr(node.value) def visit_str_expr(self, node: StrExpr) -> StrExpr: return StrExpr(node.value) def visit_bytes_expr(self, node: BytesExpr) -> BytesExpr: return BytesExpr(node.value) def visit_float_expr(self, node: FloatExpr) -> FloatExpr: return FloatExpr(node.value) def visit_complex_expr(self, node: ComplexExpr) -> ComplexExpr: return ComplexExpr(node.value) def visit_ellipsis(self, node: EllipsisExpr) -> EllipsisExpr: return EllipsisExpr() def visit_name_expr(self, node: NameExpr) -> NameExpr: return self.duplicate_name(node) def duplicate_name(self, node: NameExpr) -> NameExpr: # This method is used when the transform result must be a NameExpr. # visit_name_expr() is used when there is no such restriction. new = NameExpr(node.name) self.copy_ref(new, node) new.is_special_form = node.is_special_form return new def visit_member_expr(self, node: MemberExpr) -> MemberExpr: member = MemberExpr(self.expr(node.expr), node.name) if node.def_var: # This refers to an attribute and we don't transform attributes by default, # just normal variables. member.def_var = node.def_var self.copy_ref(member, node) return member def copy_ref(self, new: RefExpr, original: RefExpr) -> None: new.kind = original.kind new.fullname = original.fullname target = original.node if isinstance(target, Var): # Do not transform references to global variables. See # testGenericFunctionAliasExpand for an example where this is important. if original.kind != GDEF: target = self.visit_var(target) elif isinstance(target, Decorator): target = self.visit_var(target.var) elif isinstance(target, FuncDef): # Use a placeholder node for the function if it exists. 
target = self.func_placeholder_map.get(target, target) new.node = target new.is_new_def = original.is_new_def new.is_inferred_def = original.is_inferred_def def visit_yield_from_expr(self, node: YieldFromExpr) -> YieldFromExpr: return YieldFromExpr(self.expr(node.expr)) def visit_yield_expr(self, node: YieldExpr) -> YieldExpr: return YieldExpr(self.optional_expr(node.expr)) def visit_await_expr(self, node: AwaitExpr) -> AwaitExpr: return AwaitExpr(self.expr(node.expr)) def visit_call_expr(self, node: CallExpr) -> CallExpr: return CallExpr( self.expr(node.callee), self.expressions(node.args), node.arg_kinds.copy(), node.arg_names.copy(), self.optional_expr(node.analyzed), ) def visit_op_expr(self, node: OpExpr) -> OpExpr: new = OpExpr( node.op, self.expr(node.left), self.expr(node.right), cast(Optional[TypeAliasExpr], self.optional_expr(node.analyzed)), ) new.method_type = self.optional_type(node.method_type) return new def visit_comparison_expr(self, node: ComparisonExpr) -> ComparisonExpr: new = ComparisonExpr(node.operators, self.expressions(node.operands)) new.method_types = [self.optional_type(t) for t in node.method_types] return new def visit_cast_expr(self, node: CastExpr) -> CastExpr: return CastExpr(self.expr(node.expr), self.type(node.type)) def visit_assert_type_expr(self, node: AssertTypeExpr) -> AssertTypeExpr: return AssertTypeExpr(self.expr(node.expr), self.type(node.type)) def visit_reveal_expr(self, node: RevealExpr) -> RevealExpr: if node.kind == REVEAL_TYPE: assert node.expr is not None return RevealExpr(kind=REVEAL_TYPE, expr=self.expr(node.expr)) else: # Reveal locals expressions don't have any sub expressions return node def visit_super_expr(self, node: SuperExpr) -> SuperExpr: call = self.expr(node.call) assert isinstance(call, CallExpr) new = SuperExpr(node.name, call) new.info = node.info return new def visit_assignment_expr(self, node: AssignmentExpr) -> AssignmentExpr: return AssignmentExpr(self.expr(node.target), self.expr(node.value)) def visit_unary_expr(self, node: UnaryExpr) -> UnaryExpr: new = UnaryExpr(node.op, self.expr(node.expr)) new.method_type = self.optional_type(node.method_type) return new def visit_list_expr(self, node: ListExpr) -> ListExpr: return ListExpr(self.expressions(node.items)) def visit_dict_expr(self, node: DictExpr) -> DictExpr: return DictExpr( [(self.expr(key) if key else None, self.expr(value)) for key, value in node.items] ) def visit_tuple_expr(self, node: TupleExpr) -> TupleExpr: return TupleExpr(self.expressions(node.items)) def visit_set_expr(self, node: SetExpr) -> SetExpr: return SetExpr(self.expressions(node.items)) def visit_index_expr(self, node: IndexExpr) -> IndexExpr: new = IndexExpr(self.expr(node.base), self.expr(node.index)) if node.method_type: new.method_type = self.type(node.method_type) if node.analyzed: if isinstance(node.analyzed, TypeApplication): new.analyzed = self.visit_type_application(node.analyzed) else: new.analyzed = self.visit_type_alias_expr(node.analyzed) new.analyzed.set_line(node.analyzed) return new def visit_type_application(self, node: TypeApplication) -> TypeApplication: return TypeApplication(self.expr(node.expr), self.types(node.types)) def visit_list_comprehension(self, node: ListComprehension) -> ListComprehension: generator = self.duplicate_generator(node.generator) generator.set_line(node.generator) return ListComprehension(generator) def visit_set_comprehension(self, node: SetComprehension) -> SetComprehension: generator = self.duplicate_generator(node.generator) 
generator.set_line(node.generator) return SetComprehension(generator) def visit_dictionary_comprehension( self, node: DictionaryComprehension ) -> DictionaryComprehension: return DictionaryComprehension( self.expr(node.key), self.expr(node.value), [self.expr(index) for index in node.indices], [self.expr(s) for s in node.sequences], [[self.expr(cond) for cond in conditions] for conditions in node.condlists], node.is_async, ) def visit_generator_expr(self, node: GeneratorExpr) -> GeneratorExpr: return self.duplicate_generator(node) def duplicate_generator(self, node: GeneratorExpr) -> GeneratorExpr: return GeneratorExpr( self.expr(node.left_expr), [self.expr(index) for index in node.indices], [self.expr(s) for s in node.sequences], [[self.expr(cond) for cond in conditions] for conditions in node.condlists], node.is_async, ) def visit_slice_expr(self, node: SliceExpr) -> SliceExpr: return SliceExpr( self.optional_expr(node.begin_index), self.optional_expr(node.end_index), self.optional_expr(node.stride), ) def visit_conditional_expr(self, node: ConditionalExpr) -> ConditionalExpr: return ConditionalExpr( self.expr(node.cond), self.expr(node.if_expr), self.expr(node.else_expr) ) def visit_type_var_expr(self, node: TypeVarExpr) -> TypeVarExpr: return TypeVarExpr( node.name, node.fullname, self.types(node.values), self.type(node.upper_bound), self.type(node.default), variance=node.variance, ) def visit_paramspec_expr(self, node: ParamSpecExpr) -> ParamSpecExpr: return ParamSpecExpr( node.name, node.fullname, self.type(node.upper_bound), self.type(node.default), variance=node.variance, ) def visit_type_var_tuple_expr(self, node: TypeVarTupleExpr) -> TypeVarTupleExpr: return TypeVarTupleExpr( node.name, node.fullname, self.type(node.upper_bound), node.tuple_fallback, self.type(node.default), variance=node.variance, ) def visit_type_alias_expr(self, node: TypeAliasExpr) -> TypeAliasExpr: return TypeAliasExpr(node.node) def visit_newtype_expr(self, node: NewTypeExpr) -> NewTypeExpr: res = NewTypeExpr(node.name, node.old_type, line=node.line, column=node.column) res.info = node.info return res def visit_namedtuple_expr(self, node: NamedTupleExpr) -> NamedTupleExpr: return NamedTupleExpr(node.info) def visit_enum_call_expr(self, node: EnumCallExpr) -> EnumCallExpr: return EnumCallExpr(node.info, node.items, node.values) def visit_typeddict_expr(self, node: TypedDictExpr) -> Node: return TypedDictExpr(node.info) def visit__promote_expr(self, node: PromoteExpr) -> PromoteExpr: return PromoteExpr(node.type) def visit_temp_node(self, node: TempNode) -> TempNode: return TempNode(self.type(node.type)) def node(self, node: Node) -> Node: new = node.accept(self) new.set_line(node) return new def mypyfile(self, node: MypyFile) -> MypyFile: new = node.accept(self) assert isinstance(new, MypyFile) new.set_line(node) return new def expr(self, expr: Expression) -> Expression: new = expr.accept(self) assert isinstance(new, Expression) new.set_line(expr) return new def stmt(self, stmt: Statement) -> Statement: new = stmt.accept(self) assert isinstance(new, Statement) new.set_line(stmt) return new def pattern(self, pattern: Pattern) -> Pattern: new = pattern.accept(self) assert isinstance(new, Pattern) new.set_line(pattern) return new # Helpers # # All the node helpers also propagate line numbers. 
def optional_expr(self, expr: Expression | None) -> Expression | None: if expr: return self.expr(expr) else: return None def block(self, block: Block) -> Block: new = self.visit_block(block) new.line = block.line return new def optional_block(self, block: Block | None) -> Block | None: if block: return self.block(block) else: return None def statements(self, statements: list[Statement]) -> list[Statement]: return [self.stmt(stmt) for stmt in statements] def expressions(self, expressions: list[Expression]) -> list[Expression]: return [self.expr(expr) for expr in expressions] def optional_expressions( self, expressions: Iterable[Expression | None] ) -> list[Expression | None]: return [self.optional_expr(expr) for expr in expressions] def blocks(self, blocks: list[Block]) -> list[Block]: return [self.block(block) for block in blocks] def names(self, names: list[NameExpr]) -> list[NameExpr]: return [self.duplicate_name(name) for name in names] def optional_names(self, names: Iterable[NameExpr | None]) -> list[NameExpr | None]: result: list[NameExpr | None] = [] for name in names: if name: result.append(self.duplicate_name(name)) else: result.append(None) return result def type(self, type: Type) -> Type: # Override this method to transform types. return type def optional_type(self, type: Type | None) -> Type | None: if type: return self.type(type) else: return None def types(self, types: list[Type]) -> list[Type]: return [self.type(type) for type in types] class FuncMapInitializer(TraverserVisitor): """This traverser creates mappings from nested FuncDefs to placeholder FuncDefs. The placeholders will later be replaced with transformed nodes. """ def __init__(self, transformer: TransformVisitor) -> None: self.transformer = transformer def visit_func_def(self, node: FuncDef) -> None: if node not in self.transformer.func_placeholder_map: # Haven't seen this FuncDef before, so create a placeholder node. self.transformer.func_placeholder_map[node] = FuncDef( node.name, node.arguments, node.body, None ) super().visit_func_def(node)
algorandfoundation/puya
src/puyapy/_vendor/mypy/treetransform.py
Python
NOASSERTION
28,486
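The TransformVisitor above is built for subclassing: every type carried by a copied node funnels through the single type() hook, so overriding that one method is enough to deep-copy an AST while rewriting its types. A minimal sketch under that assumption follows (the module is importable as mypy.treetransform in this vendored layout); EraseTypesTransform and copy_with_erased_types are illustrative names, not part of the module.

from mypy.nodes import FuncDef
from mypy.treetransform import TransformVisitor
from mypy.types import AnyType, Type, TypeOfAny


class EraseTypesTransform(TransformVisitor):
    """Deep-copy an AST subtree, replacing every stored type with Any."""

    def type(self, type: Type) -> Type:
        # All node helpers route propagated types through this hook,
        # so this one override transforms every type in the copy.
        return AnyType(TypeOfAny.special_form)


def copy_with_erased_types(fn: FuncDef) -> FuncDef:
    new = EraseTypesTransform().node(fn)  # node() copies and re-propagates line info
    assert isinstance(new, FuncDef)
    return new

Nested function definitions are handled by the FuncMapInitializer machinery above, which maps inner FuncDefs to placeholders that are later replaced with transformed nodes.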
from __future__ import annotations from mypy.nodes import ( ParamSpecExpr, SymbolTableNode, TypeVarExpr, TypeVarLikeExpr, TypeVarTupleExpr, ) from mypy.types import ( ParamSpecFlavor, ParamSpecType, TypeVarId, TypeVarLikeType, TypeVarTupleType, TypeVarType, ) from mypy.typetraverser import TypeTraverserVisitor class TypeVarLikeNamespaceSetter(TypeTraverserVisitor): """Set namespace for all TypeVarLikeTypes types.""" def __init__(self, namespace: str) -> None: self.namespace = namespace def visit_type_var(self, t: TypeVarType) -> None: t.id.namespace = self.namespace super().visit_type_var(t) def visit_param_spec(self, t: ParamSpecType) -> None: t.id.namespace = self.namespace return super().visit_param_spec(t) def visit_type_var_tuple(self, t: TypeVarTupleType) -> None: t.id.namespace = self.namespace super().visit_type_var_tuple(t) class TypeVarLikeScope: """Scope that holds bindings for type variables and parameter specifications. Node fullname -> TypeVarLikeType. """ def __init__( self, parent: TypeVarLikeScope | None = None, is_class_scope: bool = False, prohibited: TypeVarLikeScope | None = None, namespace: str = "", ) -> None: """Initializer for TypeVarLikeScope Parameters: parent: the outer scope for this scope is_class_scope: True if this represents a generic class prohibited: Type variables that aren't strictly in scope exactly, but can't be bound because they're part of an outer class's scope. """ self.scope: dict[str, TypeVarLikeType] = {} self.parent = parent self.func_id = 0 self.class_id = 0 self.is_class_scope = is_class_scope self.prohibited = prohibited self.namespace = namespace if parent is not None: self.func_id = parent.func_id self.class_id = parent.class_id def get_function_scope(self) -> TypeVarLikeScope | None: """Get the nearest parent that's a function scope, not a class scope""" it: TypeVarLikeScope | None = self while it is not None and it.is_class_scope: it = it.parent return it def allow_binding(self, fullname: str) -> bool: if fullname in self.scope: return False elif self.parent and not self.parent.allow_binding(fullname): return False elif self.prohibited and not self.prohibited.allow_binding(fullname): return False return True def method_frame(self, namespace: str) -> TypeVarLikeScope: """A new scope frame for binding a method""" return TypeVarLikeScope(self, False, None, namespace=namespace) def class_frame(self, namespace: str) -> TypeVarLikeScope: """A new scope frame for binding a class. 
Prohibits *this* class's tvars""" return TypeVarLikeScope(self.get_function_scope(), True, self, namespace=namespace) def new_unique_func_id(self) -> TypeVarId: """Used by plugin-like code that needs to make synthetic generic functions.""" self.func_id -= 1 return TypeVarId(self.func_id) def bind_new(self, name: str, tvar_expr: TypeVarLikeExpr) -> TypeVarLikeType: if self.is_class_scope: self.class_id += 1 i = self.class_id else: self.func_id -= 1 i = self.func_id namespace = self.namespace tvar_expr.default.accept(TypeVarLikeNamespaceSetter(namespace)) if isinstance(tvar_expr, TypeVarExpr): tvar_def: TypeVarLikeType = TypeVarType( name=name, fullname=tvar_expr.fullname, id=TypeVarId(i, namespace=namespace), values=tvar_expr.values, upper_bound=tvar_expr.upper_bound, default=tvar_expr.default, variance=tvar_expr.variance, line=tvar_expr.line, column=tvar_expr.column, ) elif isinstance(tvar_expr, ParamSpecExpr): tvar_def = ParamSpecType( name=name, fullname=tvar_expr.fullname, id=TypeVarId(i, namespace=namespace), flavor=ParamSpecFlavor.BARE, upper_bound=tvar_expr.upper_bound, default=tvar_expr.default, line=tvar_expr.line, column=tvar_expr.column, ) elif isinstance(tvar_expr, TypeVarTupleExpr): tvar_def = TypeVarTupleType( name=name, fullname=tvar_expr.fullname, id=TypeVarId(i, namespace=namespace), upper_bound=tvar_expr.upper_bound, tuple_fallback=tvar_expr.tuple_fallback, default=tvar_expr.default, line=tvar_expr.line, column=tvar_expr.column, ) else: assert False self.scope[tvar_expr.fullname] = tvar_def return tvar_def def bind_existing(self, tvar_def: TypeVarLikeType) -> None: self.scope[tvar_def.fullname] = tvar_def def get_binding(self, item: str | SymbolTableNode) -> TypeVarLikeType | None: fullname = item.fullname if isinstance(item, SymbolTableNode) else item assert fullname if fullname in self.scope: return self.scope[fullname] elif self.parent is not None: return self.parent.get_binding(fullname) else: return None def __str__(self) -> str: me = ", ".join(f"{k}: {v.name}`{v.id}" for k, v in self.scope.items()) if self.parent is None: return me return f"{self.parent} <- {me}"
algorandfoundation/puya
src/puyapy/_vendor/mypy/tvar_scope.py
Python
NOASSERTION
5,895
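To make the scope rules above concrete, here is a minimal sketch of binding and looking up a type variable, assuming the vendored modules import as mypy.nodes, mypy.tvar_scope and mypy.types; the names t_expr, func_scope and cls_scope are illustrative, and AnyType stands in for a real upper bound (normally builtins.object) and for a missing default.

from mypy.nodes import INVARIANT, TypeVarExpr
from mypy.tvar_scope import TypeVarLikeScope
from mypy.types import AnyType, TypeOfAny

bound = AnyType(TypeOfAny.special_form)
default = AnyType(TypeOfAny.from_omitted_generics)
t_expr = TypeVarExpr("T", "m.T", [], bound, default, variance=INVARIANT)

func_scope = TypeVarLikeScope(namespace="m.f")
tvar_def = func_scope.bind_new("T", t_expr)
assert func_scope.func_id == -1                    # function frames count downwards
assert func_scope.get_binding("m.T") is tvar_def   # lookup by fullname

cls_scope = func_scope.class_frame(namespace="m.C")
assert cls_scope.get_binding("m.T") is tvar_def    # lookups walk the parent chain
assert not cls_scope.allow_binding("m.T")          # already bound in an enclosing frame

Note the id bookkeeping in bind_new: function frames hand out negative ids, class frames positive ones.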
"""Type visitor classes. This module defines the type visitors that are intended to be subclassed by other code. They have been separated out into their own module to ease converting mypy to run under mypyc, since currently mypyc-extension classes can extend interpreted classes but not the other way around. Separating them out, then, allows us to compile types before we can compile everything that uses a TypeVisitor. The visitors are all re-exported from mypy.types and that is how other modules refer to them. """ from __future__ import annotations from abc import abstractmethod from typing import Any, Callable, Final, Generic, Iterable, Sequence, TypeVar, cast from mypy_extensions import mypyc_attr, trait from mypy.types import ( AnyType, CallableArgument, CallableType, DeletedType, EllipsisType, ErasedType, Instance, LiteralType, NoneType, Overloaded, Parameters, ParamSpecType, PartialType, PlaceholderType, RawExpressionType, TupleType, Type, TypeAliasType, TypedDictType, TypeList, TypeType, TypeVarLikeType, TypeVarTupleType, TypeVarType, UnboundType, UninhabitedType, UnionType, UnpackType, get_proper_type, ) T = TypeVar("T") @trait @mypyc_attr(allow_interpreted_subclasses=True) class TypeVisitor(Generic[T]): """Visitor class for types (Type subclasses). The parameter T is the return type of the visit methods. """ @abstractmethod def visit_unbound_type(self, t: UnboundType) -> T: pass @abstractmethod def visit_any(self, t: AnyType) -> T: pass @abstractmethod def visit_none_type(self, t: NoneType) -> T: pass @abstractmethod def visit_uninhabited_type(self, t: UninhabitedType) -> T: pass @abstractmethod def visit_erased_type(self, t: ErasedType) -> T: pass @abstractmethod def visit_deleted_type(self, t: DeletedType) -> T: pass @abstractmethod def visit_type_var(self, t: TypeVarType) -> T: pass @abstractmethod def visit_param_spec(self, t: ParamSpecType) -> T: pass @abstractmethod def visit_parameters(self, t: Parameters) -> T: pass @abstractmethod def visit_type_var_tuple(self, t: TypeVarTupleType) -> T: pass @abstractmethod def visit_instance(self, t: Instance) -> T: pass @abstractmethod def visit_callable_type(self, t: CallableType) -> T: pass @abstractmethod def visit_overloaded(self, t: Overloaded) -> T: pass @abstractmethod def visit_tuple_type(self, t: TupleType) -> T: pass @abstractmethod def visit_typeddict_type(self, t: TypedDictType) -> T: pass @abstractmethod def visit_literal_type(self, t: LiteralType) -> T: pass @abstractmethod def visit_union_type(self, t: UnionType) -> T: pass @abstractmethod def visit_partial_type(self, t: PartialType) -> T: pass @abstractmethod def visit_type_type(self, t: TypeType) -> T: pass @abstractmethod def visit_type_alias_type(self, t: TypeAliasType) -> T: pass @abstractmethod def visit_unpack_type(self, t: UnpackType) -> T: pass @trait @mypyc_attr(allow_interpreted_subclasses=True) class SyntheticTypeVisitor(TypeVisitor[T]): """A TypeVisitor that also knows how to visit synthetic AST constructs. Not just real types. """ @abstractmethod def visit_type_list(self, t: TypeList) -> T: pass @abstractmethod def visit_callable_argument(self, t: CallableArgument) -> T: pass @abstractmethod def visit_ellipsis_type(self, t: EllipsisType) -> T: pass @abstractmethod def visit_raw_expression_type(self, t: RawExpressionType) -> T: pass @abstractmethod def visit_placeholder_type(self, t: PlaceholderType) -> T: pass @mypyc_attr(allow_interpreted_subclasses=True) class TypeTranslator(TypeVisitor[Type]): """Identity type transformation. 
Subclass this and override some methods to implement a non-trivial transformation. We cache the results of certain translations to avoid massively expanding the sizes of types. """ def __init__(self, cache: dict[Type, Type] | None = None) -> None: # For deduplication of results self.cache = cache def get_cached(self, t: Type) -> Type | None: if self.cache is None: return None return self.cache.get(t) def set_cached(self, orig: Type, new: Type) -> None: if self.cache is None: # Minor optimization: construct lazily self.cache = {} self.cache[orig] = new def visit_unbound_type(self, t: UnboundType) -> Type: return t def visit_any(self, t: AnyType) -> Type: return t def visit_none_type(self, t: NoneType) -> Type: return t def visit_uninhabited_type(self, t: UninhabitedType) -> Type: return t def visit_erased_type(self, t: ErasedType) -> Type: return t def visit_deleted_type(self, t: DeletedType) -> Type: return t def visit_instance(self, t: Instance) -> Type: last_known_value: LiteralType | None = None if t.last_known_value is not None: raw_last_known_value = t.last_known_value.accept(self) assert isinstance(raw_last_known_value, LiteralType) # type: ignore[misc] last_known_value = raw_last_known_value return Instance( typ=t.type, args=self.translate_types(t.args), line=t.line, column=t.column, last_known_value=last_known_value, extra_attrs=t.extra_attrs, ) def visit_type_var(self, t: TypeVarType) -> Type: return t def visit_param_spec(self, t: ParamSpecType) -> Type: return t def visit_parameters(self, t: Parameters) -> Type: return t.copy_modified(arg_types=self.translate_types(t.arg_types)) def visit_type_var_tuple(self, t: TypeVarTupleType) -> Type: return t def visit_partial_type(self, t: PartialType) -> Type: return t def visit_unpack_type(self, t: UnpackType) -> Type: return UnpackType(t.type.accept(self)) def visit_callable_type(self, t: CallableType) -> Type: return t.copy_modified( arg_types=self.translate_types(t.arg_types), ret_type=t.ret_type.accept(self), variables=self.translate_variables(t.variables), ) def visit_tuple_type(self, t: TupleType) -> Type: return TupleType( self.translate_types(t.items), # TODO: This appears to be unsafe. cast(Any, t.partial_fallback.accept(self)), t.line, t.column, ) def visit_typeddict_type(self, t: TypedDictType) -> Type: # Use cache to avoid O(n**2) or worse expansion of types during translation if cached := self.get_cached(t): return cached items = {item_name: item_type.accept(self) for (item_name, item_type) in t.items.items()} result = TypedDictType( items, t.required_keys, t.readonly_keys, # TODO: This appears to be unsafe. 
cast(Any, t.fallback.accept(self)), t.line, t.column, ) self.set_cached(t, result) return result def visit_literal_type(self, t: LiteralType) -> Type: fallback = t.fallback.accept(self) assert isinstance(fallback, Instance) # type: ignore[misc] return LiteralType(value=t.value, fallback=fallback, line=t.line, column=t.column) def visit_union_type(self, t: UnionType) -> Type: # Use cache to avoid O(n**2) or worse expansion of types during translation # (only for large unions, since caching adds overhead) use_cache = len(t.items) > 3 if use_cache and (cached := self.get_cached(t)): return cached result = UnionType( self.translate_types(t.items), t.line, t.column, uses_pep604_syntax=t.uses_pep604_syntax, ) if use_cache: self.set_cached(t, result) return result def translate_types(self, types: Iterable[Type]) -> list[Type]: return [t.accept(self) for t in types] def translate_variables( self, variables: Sequence[TypeVarLikeType] ) -> Sequence[TypeVarLikeType]: return variables def visit_overloaded(self, t: Overloaded) -> Type: items: list[CallableType] = [] for item in t.items: new = item.accept(self) assert isinstance(new, CallableType) # type: ignore[misc] items.append(new) return Overloaded(items=items) def visit_type_type(self, t: TypeType) -> Type: return TypeType.make_normalized(t.item.accept(self), line=t.line, column=t.column) @abstractmethod def visit_type_alias_type(self, t: TypeAliasType) -> Type: # This method doesn't have a default implementation for type translators, # because type aliases are special: some information is contained in the # TypeAlias node, and we normally don't generate new nodes. Every subclass # must implement this depending on its semantics. pass @mypyc_attr(allow_interpreted_subclasses=True) class TypeQuery(SyntheticTypeVisitor[T]): """Visitor for performing queries of types. strategy is used to combine results for a series of types, common use cases involve a boolean query using `any` or `all`. Note: this visitor keeps an internal state (tracks type aliases to avoid recursion), so it should *never* be re-used for querying different types, create a new visitor instance instead. # TODO: check that we don't have existing violations of this rule. """ def __init__(self, strategy: Callable[[list[T]], T]) -> None: self.strategy = strategy # Keep track of the type aliases already visited. This is needed to avoid # infinite recursion on types like A = Union[int, List[A]]. self.seen_aliases: set[TypeAliasType] = set() # By default, we eagerly expand type aliases, and query also types in the # alias target. In most cases this is a desired behavior, but we may want # to skip targets in some cases (e.g. when collecting type variables). 
self.skip_alias_target = False def visit_unbound_type(self, t: UnboundType) -> T: return self.query_types(t.args) def visit_type_list(self, t: TypeList) -> T: return self.query_types(t.items) def visit_callable_argument(self, t: CallableArgument) -> T: return t.typ.accept(self) def visit_any(self, t: AnyType) -> T: return self.strategy([]) def visit_uninhabited_type(self, t: UninhabitedType) -> T: return self.strategy([]) def visit_none_type(self, t: NoneType) -> T: return self.strategy([]) def visit_erased_type(self, t: ErasedType) -> T: return self.strategy([]) def visit_deleted_type(self, t: DeletedType) -> T: return self.strategy([]) def visit_type_var(self, t: TypeVarType) -> T: return self.query_types([t.upper_bound, t.default] + t.values) def visit_param_spec(self, t: ParamSpecType) -> T: return self.query_types([t.upper_bound, t.default, t.prefix]) def visit_type_var_tuple(self, t: TypeVarTupleType) -> T: return self.query_types([t.upper_bound, t.default]) def visit_unpack_type(self, t: UnpackType) -> T: return self.query_types([t.type]) def visit_parameters(self, t: Parameters) -> T: return self.query_types(t.arg_types) def visit_partial_type(self, t: PartialType) -> T: return self.strategy([]) def visit_instance(self, t: Instance) -> T: return self.query_types(t.args) def visit_callable_type(self, t: CallableType) -> T: # FIX generics return self.query_types(t.arg_types + [t.ret_type]) def visit_tuple_type(self, t: TupleType) -> T: return self.query_types(t.items) def visit_typeddict_type(self, t: TypedDictType) -> T: return self.query_types(t.items.values()) def visit_raw_expression_type(self, t: RawExpressionType) -> T: return self.strategy([]) def visit_literal_type(self, t: LiteralType) -> T: return self.strategy([]) def visit_union_type(self, t: UnionType) -> T: return self.query_types(t.items) def visit_overloaded(self, t: Overloaded) -> T: return self.query_types(t.items) def visit_type_type(self, t: TypeType) -> T: return t.item.accept(self) def visit_ellipsis_type(self, t: EllipsisType) -> T: return self.strategy([]) def visit_placeholder_type(self, t: PlaceholderType) -> T: return self.query_types(t.args) def visit_type_alias_type(self, t: TypeAliasType) -> T: # Skip type aliases we have already visited to avoid infinite recursion. # TODO: Ideally we should fire subvisitors here (or use caching) if we care # about duplicates. if t in self.seen_aliases: return self.strategy([]) self.seen_aliases.add(t) if self.skip_alias_target: return self.query_types(t.args) return get_proper_type(t).accept(self) def query_types(self, types: Iterable[Type]) -> T: """Perform a query for a list of types using the strategy to combine the results.""" return self.strategy([t.accept(self) for t in types]) # Return True if at least one type component returns True ANY_STRATEGY: Final = 0 # Return True if no type component returns False ALL_STRATEGY: Final = 1 class BoolTypeQuery(SyntheticTypeVisitor[bool]): """Visitor for performing recursive queries of types with a bool result. Use TypeQuery if you need non-bool results. 'strategy' is used to combine results for a series of types. It must be ANY_STRATEGY or ALL_STRATEGY. Note: This visitor keeps an internal state (tracks type aliases to avoid recursion), so it should *never* be re-used for querying different types unless you call reset() first.
""" def __init__(self, strategy: int) -> None: self.strategy = strategy if strategy == ANY_STRATEGY: self.default = False else: assert strategy == ALL_STRATEGY self.default = True # Keep track of the type aliases already visited. This is needed to avoid # infinite recursion on types like A = Union[int, List[A]]. An empty set is # represented as None as a micro-optimization. self.seen_aliases: set[TypeAliasType] | None = None # By default, we eagerly expand type aliases, and query also types in the # alias target. In most cases this is a desired behavior, but we may want # to skip targets in some cases (e.g. when collecting type variables). self.skip_alias_target = False def reset(self) -> None: """Clear mutable state (but preserve strategy). This *must* be called if you want to reuse the visitor. """ self.seen_aliases = None def visit_unbound_type(self, t: UnboundType) -> bool: return self.query_types(t.args) def visit_type_list(self, t: TypeList) -> bool: return self.query_types(t.items) def visit_callable_argument(self, t: CallableArgument) -> bool: return t.typ.accept(self) def visit_any(self, t: AnyType) -> bool: return self.default def visit_uninhabited_type(self, t: UninhabitedType) -> bool: return self.default def visit_none_type(self, t: NoneType) -> bool: return self.default def visit_erased_type(self, t: ErasedType) -> bool: return self.default def visit_deleted_type(self, t: DeletedType) -> bool: return self.default def visit_type_var(self, t: TypeVarType) -> bool: return self.query_types([t.upper_bound, t.default] + t.values) def visit_param_spec(self, t: ParamSpecType) -> bool: return self.query_types([t.upper_bound, t.default]) def visit_type_var_tuple(self, t: TypeVarTupleType) -> bool: return self.query_types([t.upper_bound, t.default]) def visit_unpack_type(self, t: UnpackType) -> bool: return self.query_types([t.type]) def visit_parameters(self, t: Parameters) -> bool: return self.query_types(t.arg_types) def visit_partial_type(self, t: PartialType) -> bool: return self.default def visit_instance(self, t: Instance) -> bool: return self.query_types(t.args) def visit_callable_type(self, t: CallableType) -> bool: # FIX generics # Avoid allocating any objects here as an optimization. args = self.query_types(t.arg_types) ret = t.ret_type.accept(self) if self.strategy == ANY_STRATEGY: return args or ret else: return args and ret def visit_tuple_type(self, t: TupleType) -> bool: return self.query_types(t.items) def visit_typeddict_type(self, t: TypedDictType) -> bool: return self.query_types(list(t.items.values())) def visit_raw_expression_type(self, t: RawExpressionType) -> bool: return self.default def visit_literal_type(self, t: LiteralType) -> bool: return self.default def visit_union_type(self, t: UnionType) -> bool: return self.query_types(t.items) def visit_overloaded(self, t: Overloaded) -> bool: return self.query_types(t.items) # type: ignore[arg-type] def visit_type_type(self, t: TypeType) -> bool: return t.item.accept(self) def visit_ellipsis_type(self, t: EllipsisType) -> bool: return self.default def visit_placeholder_type(self, t: PlaceholderType) -> bool: return self.query_types(t.args) def visit_type_alias_type(self, t: TypeAliasType) -> bool: # Skip type aliases already visited types to avoid infinite recursion. # TODO: Ideally we should fire subvisitors here (or use caching) if we care # about duplicates. 
if self.seen_aliases is None: self.seen_aliases = set() elif t in self.seen_aliases: return self.default self.seen_aliases.add(t) if self.skip_alias_target: return self.query_types(t.args) return get_proper_type(t).accept(self) def query_types(self, types: list[Type] | tuple[Type, ...]) -> bool: """Perform a query for a sequence of types using the strategy to combine the results.""" # Special-case for lists and tuples to allow mypyc to produce better code. if isinstance(types, list): if self.strategy == ANY_STRATEGY: return any(t.accept(self) for t in types) else: return all(t.accept(self) for t in types) else: if self.strategy == ANY_STRATEGY: return any(t.accept(self) for t in types) else: return all(t.accept(self) for t in types)
algorandfoundation/puya
src/puyapy/_vendor/mypy/type_visitor.py
Python
NOASSERTION
19,402
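As a concrete use of the machinery above, here is a minimal BoolTypeQuery subclass in the spirit of mypy.types.has_type_vars (imported by typeanal.py below). This is a sketch only: HasTypeVarQuery and contains_type_var are illustrative names, and unlike the real helper it matches plain TypeVars but not ParamSpec or TypeVarTuple.

from mypy.types import ANY_STRATEGY, BoolTypeQuery, Type, TypeVarType


class HasTypeVarQuery(BoolTypeQuery):
    """True when the visited type contains a TypeVarType anywhere."""

    def __init__(self) -> None:
        super().__init__(ANY_STRATEGY)  # combine components with any(); default is False

    def visit_type_var(self, t: TypeVarType) -> bool:
        return True  # short-circuit instead of recursing into bound/default/values


def contains_type_var(typ: Type) -> bool:
    # A fresh visitor per query sidesteps the caveat above: instances track
    # seen aliases and must not be reused without calling reset().
    return typ.accept(HasTypeVarQuery())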
"""Semantic analysis of types""" from __future__ import annotations import itertools from contextlib import contextmanager from typing import Callable, Final, Iterable, Iterator, List, Sequence, Tuple, TypeVar from typing_extensions import Protocol from mypy import errorcodes as codes, message_registry, nodes from mypy.errorcodes import ErrorCode from mypy.expandtype import expand_type from mypy.message_registry import ( INVALID_PARAM_SPEC_LOCATION, INVALID_PARAM_SPEC_LOCATION_NOTE, TYPEDDICT_OVERRIDE_MERGE, ) from mypy.messages import ( MessageBuilder, format_type, format_type_bare, quote_type_string, wrong_type_arg_count, ) from mypy.nodes import ( ARG_NAMED, ARG_NAMED_OPT, ARG_OPT, ARG_POS, ARG_STAR, ARG_STAR2, MISSING_FALLBACK, SYMBOL_FUNCBASE_TYPES, ArgKind, Context, Decorator, MypyFile, ParamSpecExpr, PlaceholderNode, SymbolTableNode, TypeAlias, TypeInfo, TypeVarExpr, TypeVarLikeExpr, TypeVarTupleExpr, Var, check_arg_kinds, check_arg_names, get_nongen_builtins, ) from mypy.options import INLINE_TYPEDDICT, Options from mypy.plugin import AnalyzeTypeContext, Plugin, TypeAnalyzerPluginInterface from mypy.semanal_shared import ( SemanticAnalyzerCoreInterface, SemanticAnalyzerInterface, paramspec_args, paramspec_kwargs, ) from mypy.state import state from mypy.tvar_scope import TypeVarLikeScope from mypy.types import ( ANNOTATED_TYPE_NAMES, ANY_STRATEGY, FINAL_TYPE_NAMES, LITERAL_TYPE_NAMES, NEVER_NAMES, TYPE_ALIAS_NAMES, AnyType, BoolTypeQuery, CallableArgument, CallableType, DeletedType, EllipsisType, ErasedType, Instance, LiteralType, NoneType, Overloaded, Parameters, ParamSpecFlavor, ParamSpecType, PartialType, PlaceholderType, ProperType, RawExpressionType, ReadOnlyType, RequiredType, SyntheticTypeVisitor, TrivialSyntheticTypeTranslator, TupleType, Type, TypeAliasType, TypedDictType, TypeList, TypeOfAny, TypeQuery, TypeType, TypeVarId, TypeVarLikeType, TypeVarTupleType, TypeVarType, UnboundType, UninhabitedType, UnionType, UnpackType, callable_with_ellipsis, find_unpack_in_list, flatten_nested_tuples, get_proper_type, has_type_vars, ) from mypy.types_utils import is_bad_type_type_item from mypy.typevars import fill_typevars T = TypeVar("T") type_constructors: Final = { "typing.Callable", "typing.Optional", "typing.Tuple", "typing.Type", "typing.Union", *LITERAL_TYPE_NAMES, *ANNOTATED_TYPE_NAMES, } ARG_KINDS_BY_CONSTRUCTOR: Final = { "mypy_extensions.Arg": ARG_POS, "mypy_extensions.DefaultArg": ARG_OPT, "mypy_extensions.NamedArg": ARG_NAMED, "mypy_extensions.DefaultNamedArg": ARG_NAMED_OPT, "mypy_extensions.VarArg": ARG_STAR, "mypy_extensions.KwArg": ARG_STAR2, } GENERIC_STUB_NOT_AT_RUNTIME_TYPES: Final = { "queue.Queue", "builtins._PathLike", "asyncio.futures.Future", } SELF_TYPE_NAMES: Final = {"typing.Self", "typing_extensions.Self"} def analyze_type_alias( type: Type, api: SemanticAnalyzerCoreInterface, tvar_scope: TypeVarLikeScope, plugin: Plugin, options: Options, is_typeshed_stub: bool, allow_placeholder: bool = False, in_dynamic_func: bool = False, global_scope: bool = True, allowed_alias_tvars: list[TypeVarLikeType] | None = None, alias_type_params_names: list[str] | None = None, python_3_12_type_alias: bool = False, ) -> tuple[Type, set[str]]: """Analyze r.h.s. of a (potential) type alias definition. If `node` is valid as a type alias rvalue, return the resulting type and a set of full names of type aliases it depends on (directly or indirectly). 'node' must have been semantically analyzed. 
""" analyzer = TypeAnalyser( api, tvar_scope, plugin, options, is_typeshed_stub, defining_alias=True, allow_placeholder=allow_placeholder, prohibit_self_type="type alias target", allowed_alias_tvars=allowed_alias_tvars, alias_type_params_names=alias_type_params_names, python_3_12_type_alias=python_3_12_type_alias, ) analyzer.in_dynamic_func = in_dynamic_func analyzer.global_scope = global_scope res = type.accept(analyzer) return res, analyzer.aliases_used def no_subscript_builtin_alias(name: str, propose_alt: bool = True) -> str: class_name = name.split(".")[-1] msg = f'"{class_name}" is not subscriptable' # This should never be called if the python_version is 3.9 or newer nongen_builtins = get_nongen_builtins((3, 8)) replacement = nongen_builtins[name] if replacement and propose_alt: msg += f', use "{replacement}" instead' return msg class TypeAnalyser(SyntheticTypeVisitor[Type], TypeAnalyzerPluginInterface): """Semantic analyzer for types. Converts unbound types into bound types. This is a no-op for already bound types. If an incomplete reference is encountered, this does a defer. The caller never needs to defer. """ # Is this called from an untyped function definition? in_dynamic_func: bool = False # Is this called from global scope? global_scope: bool = True def __init__( self, api: SemanticAnalyzerCoreInterface, tvar_scope: TypeVarLikeScope, plugin: Plugin, options: Options, is_typeshed_stub: bool, *, defining_alias: bool = False, python_3_12_type_alias: bool = False, allow_tuple_literal: bool = False, allow_unbound_tvars: bool = False, allow_placeholder: bool = False, allow_typed_dict_special_forms: bool = False, allow_param_spec_literals: bool = False, allow_unpack: bool = False, report_invalid_types: bool = True, prohibit_self_type: str | None = None, allowed_alias_tvars: list[TypeVarLikeType] | None = None, allow_type_any: bool = False, alias_type_params_names: list[str] | None = None, ) -> None: self.api = api self.fail_func = api.fail self.note_func = api.note self.tvar_scope = tvar_scope # Are we analysing a type alias definition rvalue? self.defining_alias = defining_alias self.python_3_12_type_alias = python_3_12_type_alias self.allow_tuple_literal = allow_tuple_literal # Positive if we are analyzing arguments of another (outer) type self.nesting_level = 0 # Should we allow new type syntax when targeting older Python versions # like 'list[int]' or 'X | Y' (allowed in stubs and with `__future__` import)? self.always_allow_new_syntax = self.api.is_stub_file or self.api.is_future_flag_set( "annotations" ) # Should we accept unbound type variables? This is currently used for class bases, # and alias right hand sides (before they are analyzed as type aliases). self.allow_unbound_tvars = allow_unbound_tvars if allowed_alias_tvars is None: allowed_alias_tvars = [] self.allowed_alias_tvars = allowed_alias_tvars self.alias_type_params_names = alias_type_params_names # If false, record incomplete ref if we generate PlaceholderType. self.allow_placeholder = allow_placeholder # Are we in a context where Required[] is allowed? self.allow_typed_dict_special_forms = allow_typed_dict_special_forms # Are we in a context where ParamSpec literals are allowed? self.allow_param_spec_literals = allow_param_spec_literals # Are we in context where literal "..." specifically is allowed? self.allow_ellipsis = False # Should we report an error whenever we encounter a RawExpressionType outside # of a Literal context: e.g. whenever we encounter an invalid type? 
Normally, # we want to report an error, but the caller may want to do more specialized # error handling. self.report_invalid_types = report_invalid_types self.plugin = plugin self.options = options self.is_typeshed_stub = is_typeshed_stub # Names of type aliases encountered while analysing a type will be collected here. self.aliases_used: set[str] = set() self.prohibit_self_type = prohibit_self_type # Allow variables typed as Type[Any] and type (useful for base classes). self.allow_type_any = allow_type_any self.allow_type_var_tuple = False self.allow_unpack = allow_unpack def lookup_qualified( self, name: str, ctx: Context, suppress_errors: bool = False ) -> SymbolTableNode | None: return self.api.lookup_qualified(name, ctx, suppress_errors) def lookup_fully_qualified(self, name: str) -> SymbolTableNode: return self.api.lookup_fully_qualified(name) def visit_unbound_type(self, t: UnboundType, defining_literal: bool = False) -> Type: typ = self.visit_unbound_type_nonoptional(t, defining_literal) if t.optional: # We don't need to worry about double-wrapping Optionals or # wrapping Anys: Union simplification will take care of that. return make_optional_type(typ) return typ def not_declared_in_type_params(self, tvar_name: str) -> bool: return ( self.alias_type_params_names is not None and tvar_name not in self.alias_type_params_names ) def visit_unbound_type_nonoptional(self, t: UnboundType, defining_literal: bool) -> Type: sym = self.lookup_qualified(t.name, t) if sym is not None: node = sym.node if isinstance(node, PlaceholderNode): if node.becomes_typeinfo: # Reference to placeholder type. if self.api.final_iteration: self.cannot_resolve_type(t) return AnyType(TypeOfAny.from_error) elif self.allow_placeholder: self.api.defer() else: self.api.record_incomplete_ref() # Always allow ParamSpec for placeholders, if they are actually not valid, # they will be reported later, after we resolve placeholders. return PlaceholderType( node.fullname, self.anal_array( t.args, allow_param_spec=True, allow_param_spec_literals=True, allow_unpack=True, ), t.line, ) else: if self.api.final_iteration: self.cannot_resolve_type(t) return AnyType(TypeOfAny.from_error) else: # Reference to an unknown placeholder node. 
self.api.record_incomplete_ref() return AnyType(TypeOfAny.special_form) if node is None: self.fail(f"Internal error (node is None, kind={sym.kind})", t) return AnyType(TypeOfAny.special_form) fullname = node.fullname hook = self.plugin.get_type_analyze_hook(fullname) if hook is not None: return hook(AnalyzeTypeContext(t, t, self)) if ( fullname in get_nongen_builtins(self.options.python_version) and t.args and not self.always_allow_new_syntax ): self.fail( no_subscript_builtin_alias(fullname, propose_alt=not self.defining_alias), t ) tvar_def = self.tvar_scope.get_binding(sym) if isinstance(sym.node, ParamSpecExpr): if tvar_def is None: if self.allow_unbound_tvars: return t if self.defining_alias and self.not_declared_in_type_params(t.name): msg = f'ParamSpec "{t.name}" is not included in type_params' else: msg = f'ParamSpec "{t.name}" is unbound' self.fail(msg, t, code=codes.VALID_TYPE) return AnyType(TypeOfAny.from_error) assert isinstance(tvar_def, ParamSpecType) if len(t.args) > 0: self.fail( f'ParamSpec "{t.name}" used with arguments', t, code=codes.VALID_TYPE ) # Change the line number return ParamSpecType( tvar_def.name, tvar_def.fullname, tvar_def.id, tvar_def.flavor, tvar_def.upper_bound, tvar_def.default, line=t.line, column=t.column, ) if ( isinstance(sym.node, TypeVarExpr) and self.defining_alias and not defining_literal and (tvar_def is None or tvar_def not in self.allowed_alias_tvars) ): if self.not_declared_in_type_params(t.name): if self.python_3_12_type_alias: msg = message_registry.TYPE_PARAMETERS_SHOULD_BE_DECLARED.format( f'"{t.name}"' ) else: msg = f'Type variable "{t.name}" is not included in type_params' else: msg = f'Can\'t use bound type variable "{t.name}" to define generic alias' self.fail(msg, t, code=codes.VALID_TYPE) return AnyType(TypeOfAny.from_error) if isinstance(sym.node, TypeVarExpr) and tvar_def is not None: assert isinstance(tvar_def, TypeVarType) if len(t.args) > 0: self.fail( f'Type variable "{t.name}" used with arguments', t, code=codes.VALID_TYPE ) # Change the line number return tvar_def.copy_modified(line=t.line, column=t.column) if isinstance(sym.node, TypeVarTupleExpr) and ( tvar_def is not None and self.defining_alias and tvar_def not in self.allowed_alias_tvars ): if self.not_declared_in_type_params(t.name): msg = f'Type variable "{t.name}" is not included in type_params' else: msg = f'Can\'t use bound type variable "{t.name}" to define generic alias' self.fail(msg, t, code=codes.VALID_TYPE) return AnyType(TypeOfAny.from_error) if isinstance(sym.node, TypeVarTupleExpr): if tvar_def is None: if self.allow_unbound_tvars: return t if self.defining_alias and self.not_declared_in_type_params(t.name): if self.python_3_12_type_alias: msg = message_registry.TYPE_PARAMETERS_SHOULD_BE_DECLARED.format( f'"{t.name}"' ) else: msg = f'TypeVarTuple "{t.name}" is not included in type_params' else: msg = f'TypeVarTuple "{t.name}" is unbound' self.fail(msg, t, code=codes.VALID_TYPE) return AnyType(TypeOfAny.from_error) assert isinstance(tvar_def, TypeVarTupleType) if not self.allow_type_var_tuple: self.fail( f'TypeVarTuple "{t.name}" is only valid with an unpack', t, code=codes.VALID_TYPE, ) return AnyType(TypeOfAny.from_error) if len(t.args) > 0: self.fail( f'Type variable "{t.name}" used with arguments', t, code=codes.VALID_TYPE ) # Change the line number return TypeVarTupleType( tvar_def.name, tvar_def.fullname, tvar_def.id, tvar_def.upper_bound, sym.node.tuple_fallback, tvar_def.default, line=t.line, column=t.column, ) special = 
self.try_analyze_special_unbound_type(t, fullname) if special is not None: return special if isinstance(node, TypeAlias): self.aliases_used.add(fullname) an_args = self.anal_array( t.args, allow_param_spec=True, allow_param_spec_literals=node.has_param_spec_type, allow_unpack=True, # Fixed length unpacks can be used for non-variadic aliases. ) if node.has_param_spec_type and len(node.alias_tvars) == 1: an_args = self.pack_paramspec_args(an_args) disallow_any = self.options.disallow_any_generics and not self.is_typeshed_stub res = instantiate_type_alias( node, an_args, self.fail, node.no_args, t, self.options, unexpanded_type=t, disallow_any=disallow_any, empty_tuple_index=t.empty_tuple_index, ) # The only case where instantiate_type_alias() can return an incorrect instance is # when it is top-level instance, so no need to recurse. if ( isinstance(res, ProperType) and isinstance(res, Instance) and not (self.defining_alias and self.nesting_level == 0) and not validate_instance(res, self.fail, t.empty_tuple_index) ): fix_instance( res, self.fail, self.note, disallow_any=disallow_any, options=self.options, use_generic_error=True, unexpanded_type=t, ) if node.eager: res = get_proper_type(res) return res elif isinstance(node, TypeInfo): return self.analyze_type_with_type_info(node, t.args, t, t.empty_tuple_index) elif node.fullname in TYPE_ALIAS_NAMES: return AnyType(TypeOfAny.special_form) # Concatenate is an operator, no need for a proper type elif node.fullname in ("typing_extensions.Concatenate", "typing.Concatenate"): # We check the return type further up the stack for valid use locations return self.apply_concatenate_operator(t) else: return self.analyze_unbound_type_without_type_info(t, sym, defining_literal) else: # sym is None return AnyType(TypeOfAny.special_form) def pack_paramspec_args(self, an_args: Sequence[Type]) -> list[Type]: # "Aesthetic" ParamSpec literals for single ParamSpec: C[int, str] -> C[[int, str]]. # These do not support mypy_extensions VarArgs, etc. as they were already analyzed # TODO: should these be re-analyzed to get rid of this inconsistency? count = len(an_args) if count == 0: return [] if count == 1 and isinstance(get_proper_type(an_args[0]), AnyType): # Single Any is interpreted as ..., rather that a single argument with Any type. # I didn't find this in the PEP, but it sounds reasonable. return list(an_args) if any(isinstance(a, (Parameters, ParamSpecType)) for a in an_args): if len(an_args) > 1: first_wrong = next( arg for arg in an_args if isinstance(arg, (Parameters, ParamSpecType)) ) self.fail( "Nested parameter specifications are not allowed", first_wrong, code=codes.VALID_TYPE, ) return [AnyType(TypeOfAny.from_error)] return list(an_args) first = an_args[0] return [ Parameters( an_args, [ARG_POS] * count, [None] * count, line=first.line, column=first.column ) ] def cannot_resolve_type(self, t: UnboundType) -> None: # TODO: Move error message generation to messages.py. We'd first # need access to MessageBuilder here. Also move the similar # message generation logic in semanal.py. self.api.fail(f'Cannot resolve name "{t.name}" (possible cyclic definition)', t) if self.api.is_func_scope(): self.note("Recursive types are not allowed at function scope", t) def apply_concatenate_operator(self, t: UnboundType) -> Type: if len(t.args) == 0: self.api.fail("Concatenate needs type arguments", t, code=codes.VALID_TYPE) return AnyType(TypeOfAny.from_error) # Last argument has to be ParamSpec or Ellipsis. 
ps = self.anal_type(t.args[-1], allow_param_spec=True, allow_ellipsis=True) if not isinstance(ps, (ParamSpecType, Parameters)): if isinstance(ps, UnboundType) and self.allow_unbound_tvars: sym = self.lookup_qualified(ps.name, t) if sym is not None and isinstance(sym.node, ParamSpecExpr): return ps self.api.fail( "The last parameter to Concatenate needs to be a ParamSpec", t, code=codes.VALID_TYPE, ) return AnyType(TypeOfAny.from_error) elif isinstance(ps, ParamSpecType) and ps.prefix.arg_types: self.api.fail("Nested Concatenates are invalid", t, code=codes.VALID_TYPE) args = self.anal_array(t.args[:-1]) pre = ps.prefix if isinstance(ps, ParamSpecType) else ps # mypy can't infer this :( names: list[str | None] = [None] * len(args) pre = Parameters( args + pre.arg_types, [ARG_POS] * len(args) + pre.arg_kinds, names + pre.arg_names, line=t.line, column=t.column, ) return ps.copy_modified(prefix=pre) if isinstance(ps, ParamSpecType) else pre def try_analyze_special_unbound_type(self, t: UnboundType, fullname: str) -> Type | None: """Bind special type that is recognized through magic name such as 'typing.Any'. Return the bound type if successful, and return None if the type is a normal type. """ if fullname == "builtins.None": return NoneType() elif fullname == "typing.Any" or fullname == "builtins.Any": return AnyType(TypeOfAny.explicit, line=t.line, column=t.column) elif fullname in FINAL_TYPE_NAMES: self.fail( "Final can be only used as an outermost qualifier in a variable annotation", t, code=codes.VALID_TYPE, ) return AnyType(TypeOfAny.from_error) elif fullname == "typing.Tuple" or ( fullname == "builtins.tuple" and (self.always_allow_new_syntax or self.options.python_version >= (3, 9)) ): # Tuple is special because it is involved in builtin import cycle # and may be not ready when used. sym = self.api.lookup_fully_qualified_or_none("builtins.tuple") if not sym or isinstance(sym.node, PlaceholderNode): if self.api.is_incomplete_namespace("builtins"): self.api.record_incomplete_ref() else: self.fail('Name "tuple" is not defined', t) return AnyType(TypeOfAny.special_form) if len(t.args) == 0 and not t.empty_tuple_index: # Bare 'Tuple' is same as 'tuple' any_type = self.get_omitted_any(t) return self.named_type("builtins.tuple", [any_type], line=t.line, column=t.column) if len(t.args) == 2 and isinstance(t.args[1], EllipsisType): # Tuple[T, ...] (uniform, variable-length tuple) instance = self.named_type("builtins.tuple", [self.anal_type(t.args[0])]) instance.line = t.line return instance return self.tuple_type( self.anal_array(t.args, allow_unpack=True), line=t.line, column=t.column ) elif fullname == "typing.Union": items = self.anal_array(t.args) return UnionType.make_union(items) elif fullname == "typing.Optional": if len(t.args) != 1: self.fail( "Optional[...] 
must have exactly one type argument", t, code=codes.VALID_TYPE ) return AnyType(TypeOfAny.from_error) item = self.anal_type(t.args[0]) return make_optional_type(item) elif fullname == "typing.Callable": return self.analyze_callable_type(t) elif fullname == "typing.Type" or ( fullname == "builtins.type" and (self.always_allow_new_syntax or self.options.python_version >= (3, 9)) ): if len(t.args) == 0: if fullname == "typing.Type": any_type = self.get_omitted_any(t) return TypeType(any_type, line=t.line, column=t.column) else: # To prevent assignment of 'builtins.type' inferred as 'builtins.object' # See https://github.com/python/mypy/issues/9476 for more information return None if len(t.args) != 1: type_str = "Type[...]" if fullname == "typing.Type" else "type[...]" self.fail( type_str + " must have exactly one type argument", t, code=codes.VALID_TYPE ) item = self.anal_type(t.args[0]) if is_bad_type_type_item(item): self.fail("Type[...] can't contain another Type[...]", t, code=codes.VALID_TYPE) item = AnyType(TypeOfAny.from_error) return TypeType.make_normalized(item, line=t.line, column=t.column) elif fullname == "typing.ClassVar": if self.nesting_level > 0: self.fail( "Invalid type: ClassVar nested inside other type", t, code=codes.VALID_TYPE ) if len(t.args) == 0: return AnyType(TypeOfAny.from_omitted_generics, line=t.line, column=t.column) if len(t.args) != 1: self.fail( "ClassVar[...] must have at most one type argument", t, code=codes.VALID_TYPE ) return AnyType(TypeOfAny.from_error) return self.anal_type(t.args[0]) elif fullname in NEVER_NAMES: return UninhabitedType() elif fullname in LITERAL_TYPE_NAMES: return self.analyze_literal_type(t) elif fullname in ANNOTATED_TYPE_NAMES: if len(t.args) < 2: self.fail( "Annotated[...] must have exactly one type argument" " and at least one annotation", t, code=codes.VALID_TYPE, ) return AnyType(TypeOfAny.from_error) return self.anal_type(t.args[0]) elif fullname in ("typing_extensions.Required", "typing.Required"): if not self.allow_typed_dict_special_forms: self.fail( "Required[] can be only used in a TypedDict definition", t, code=codes.VALID_TYPE, ) return AnyType(TypeOfAny.from_error) if len(t.args) != 1: self.fail( "Required[] must have exactly one type argument", t, code=codes.VALID_TYPE ) return AnyType(TypeOfAny.from_error) return RequiredType( self.anal_type(t.args[0], allow_typed_dict_special_forms=True), required=True ) elif fullname in ("typing_extensions.NotRequired", "typing.NotRequired"): if not self.allow_typed_dict_special_forms: self.fail( "NotRequired[] can be only used in a TypedDict definition", t, code=codes.VALID_TYPE, ) return AnyType(TypeOfAny.from_error) if len(t.args) != 1: self.fail( "NotRequired[] must have exactly one type argument", t, code=codes.VALID_TYPE ) return AnyType(TypeOfAny.from_error) return RequiredType( self.anal_type(t.args[0], allow_typed_dict_special_forms=True), required=False ) elif fullname in ("typing_extensions.ReadOnly", "typing.ReadOnly"): if not self.allow_typed_dict_special_forms: self.fail( "ReadOnly[] can be only used in a TypedDict definition", t, code=codes.VALID_TYPE, ) return AnyType(TypeOfAny.from_error) if len(t.args) != 1: self.fail( '"ReadOnly[]" must have exactly one type argument', t, code=codes.VALID_TYPE ) return AnyType(TypeOfAny.from_error) return ReadOnlyType(self.anal_type(t.args[0], allow_typed_dict_special_forms=True)) elif ( self.anal_type_guard_arg(t, fullname) is not None or self.anal_type_is_arg(t, fullname) is not None ): # In most contexts, TypeGuard[...] 
acts as an alias for bool (ignoring its args) return self.named_type("builtins.bool") elif fullname in ("typing.Unpack", "typing_extensions.Unpack"): if len(t.args) != 1: self.fail("Unpack[...] requires exactly one type argument", t) return AnyType(TypeOfAny.from_error) if not self.allow_unpack: self.fail(message_registry.INVALID_UNPACK_POSITION, t, code=codes.VALID_TYPE) return AnyType(TypeOfAny.from_error) self.allow_type_var_tuple = True result = UnpackType(self.anal_type(t.args[0]), line=t.line, column=t.column) self.allow_type_var_tuple = False return result elif fullname in SELF_TYPE_NAMES: if t.args: self.fail("Self type cannot have type arguments", t) if self.prohibit_self_type is not None: self.fail(f"Self type cannot be used in {self.prohibit_self_type}", t) return AnyType(TypeOfAny.from_error) if self.api.type is None: self.fail("Self type is only allowed in annotations within class definition", t) return AnyType(TypeOfAny.from_error) if self.api.type.has_base("builtins.type"): self.fail("Self type cannot be used in a metaclass", t) if self.api.type.self_type is not None: if self.api.type.is_final: return fill_typevars(self.api.type) return self.api.type.self_type.copy_modified(line=t.line, column=t.column) # TODO: verify this is unreachable and replace with an assert? self.fail("Unexpected Self type", t) return AnyType(TypeOfAny.from_error) return None def get_omitted_any(self, typ: Type, fullname: str | None = None) -> AnyType: disallow_any = not self.is_typeshed_stub and self.options.disallow_any_generics return get_omitted_any(disallow_any, self.fail, self.note, typ, self.options, fullname) def analyze_type_with_type_info( self, info: TypeInfo, args: Sequence[Type], ctx: Context, empty_tuple_index: bool ) -> Type: """Bind unbound type when were able to find target TypeInfo. This handles simple cases like 'int', 'modname.UserClass[str]', etc. """ if len(args) > 0 and info.fullname == "builtins.tuple": fallback = Instance(info, [AnyType(TypeOfAny.special_form)], ctx.line) return TupleType(self.anal_array(args, allow_unpack=True), fallback, ctx.line) # Analyze arguments and (usually) construct Instance type. The # number of type arguments and their values are # checked only later, since we do not always know the # valid count at this point. Thus we may construct an # Instance with an invalid number of type arguments. # # We allow ParamSpec literals based on a heuristic: it will be # checked later anyways but the error message may be worse. instance = Instance( info, self.anal_array( args, allow_param_spec=True, allow_param_spec_literals=info.has_param_spec_type, allow_unpack=True, # Fixed length tuples can be used for non-variadic types. ), ctx.line, ctx.column, ) if len(info.type_vars) == 1 and info.has_param_spec_type: instance.args = tuple(self.pack_paramspec_args(instance.args)) # Check type argument count. instance.args = tuple(flatten_nested_tuples(instance.args)) if not (self.defining_alias and self.nesting_level == 0) and not validate_instance( instance, self.fail, empty_tuple_index ): fix_instance( instance, self.fail, self.note, disallow_any=self.options.disallow_any_generics and not self.is_typeshed_stub, options=self.options, ) tup = info.tuple_type if tup is not None: # The class has a Tuple[...] base class so it will be # represented as a tuple type. if info.special_alias: return instantiate_type_alias( info.special_alias, # TODO: should we allow NamedTuples generic in ParamSpec? 
self.anal_array(args, allow_unpack=True), self.fail, False, ctx, self.options, use_standard_error=True, ) return tup.copy_modified( items=self.anal_array(tup.items, allow_unpack=True), fallback=instance ) td = info.typeddict_type if td is not None: # The class has a TypedDict[...] base class so it will be # represented as a typeddict type. if info.special_alias: return instantiate_type_alias( info.special_alias, # TODO: should we allow TypedDicts generic in ParamSpec? self.anal_array(args, allow_unpack=True), self.fail, False, ctx, self.options, use_standard_error=True, ) # Create a named TypedDictType return td.copy_modified( item_types=self.anal_array(list(td.items.values())), fallback=instance ) if info.fullname == "types.NoneType": self.fail( "NoneType should not be used as a type, please use None instead", ctx, code=codes.VALID_TYPE, ) return NoneType(ctx.line, ctx.column) return instance def analyze_unbound_type_without_type_info( self, t: UnboundType, sym: SymbolTableNode, defining_literal: bool ) -> Type: """Figure out what an unbound type that doesn't refer to a TypeInfo node means. This is something unusual. We try our best to find out what it is. """ name = sym.fullname if name is None: assert sym.node is not None name = sym.node.name # Option 1: # Something with an Any type -- make it an alias for Any in a type # context. This is slightly problematic as it allows using the type 'Any' # as a base class -- however, this will fail soon at runtime so the problem # is pretty minor. if isinstance(sym.node, Var): typ = get_proper_type(sym.node.type) if isinstance(typ, AnyType): return AnyType( TypeOfAny.from_unimported_type, missing_import_name=typ.missing_import_name ) elif self.allow_type_any: if isinstance(typ, Instance) and typ.type.fullname == "builtins.type": return AnyType(TypeOfAny.special_form) if isinstance(typ, TypeType) and isinstance(typ.item, AnyType): return AnyType(TypeOfAny.from_another_any, source_any=typ.item) # Option 2: # Unbound type variable. Currently these may be still valid, # for example when defining a generic type alias. unbound_tvar = ( isinstance(sym.node, (TypeVarExpr, TypeVarTupleExpr)) and self.tvar_scope.get_binding(sym) is None ) if self.allow_unbound_tvars and unbound_tvar: return t # Option 3: # Enum value. Note: we only want to return a LiteralType when # we're using this enum value specifically within context of # a "Literal[...]" type. So, if `defining_literal` is not set, # we bail out early with an error. # # If, in the distant future, we decide to permit things like # `def foo(x: Color.RED) -> None: ...`, we can remove that # check entirely. if ( isinstance(sym.node, Var) and sym.node.info and sym.node.info.is_enum and not sym.node.name.startswith("__") ): value = sym.node.name base_enum_short_name = sym.node.info.name if not defining_literal: msg = message_registry.INVALID_TYPE_RAW_ENUM_VALUE.format( base_enum_short_name, value ) self.fail(msg.value, t, code=msg.code) return AnyType(TypeOfAny.from_error) return LiteralType( value=value, fallback=Instance(sym.node.info, [], line=t.line, column=t.column), line=t.line, column=t.column, ) # None of the above options worked. We parse the args (if there are any) # to make sure there are no remaining semanal-only types, then give up. t = t.copy_modified(args=self.anal_array(t.args)) # TODO: Move this message building logic to messages.py. 
notes: list[str] = [] error_code = codes.VALID_TYPE if isinstance(sym.node, Var): notes.append( "See https://mypy.readthedocs.io/en/" "stable/common_issues.html#variables-vs-type-aliases" ) message = 'Variable "{}" is not valid as a type' elif isinstance(sym.node, (SYMBOL_FUNCBASE_TYPES, Decorator)): message = 'Function "{}" is not valid as a type' if name == "builtins.any": notes.append('Perhaps you meant "typing.Any" instead of "any"?') elif name == "builtins.callable": notes.append('Perhaps you meant "typing.Callable" instead of "callable"?') else: notes.append('Perhaps you need "Callable[...]" or a callback protocol?') elif isinstance(sym.node, MypyFile): message = 'Module "{}" is not valid as a type' notes.append("Perhaps you meant to use a protocol matching the module structure?") elif unbound_tvar: assert isinstance(sym.node, TypeVarLikeExpr) if sym.node.is_new_style: # PEP 695 type parameters are never considered unbound -- they are undefined # in contexts where they aren't valid, such as in argument default values. message = 'Name "{}" is not defined' name = name.split(".")[-1] error_code = codes.NAME_DEFINED else: message = 'Type variable "{}" is unbound' short = name.split(".")[-1] notes.append( ( '(Hint: Use "Generic[{}]" or "Protocol[{}]" base class' ' to bind "{}" inside a class)' ).format(short, short, short) ) notes.append( '(Hint: Use "{}" in function signature to bind "{}"' " inside a function)".format(short, short) ) else: message = 'Cannot interpret reference "{}" as a type' if not defining_literal: # Literal check already gives a custom error. Avoid duplicating errors. self.fail(message.format(name), t, code=error_code) for note in notes: self.note(note, t, code=error_code) # TODO: Would it be better to always return Any instead of UnboundType # in case of an error? On one hand, UnboundType has a name so error messages # are more detailed, on the other hand, some of them may be bogus, # see https://github.com/python/mypy/issues/4987. return t def visit_any(self, t: AnyType) -> Type: return t def visit_none_type(self, t: NoneType) -> Type: return t def visit_uninhabited_type(self, t: UninhabitedType) -> Type: return t def visit_erased_type(self, t: ErasedType) -> Type: # This type should exist only temporarily during type inference assert False, "Internal error: Unexpected erased type" def visit_deleted_type(self, t: DeletedType) -> Type: return t def visit_type_list(self, t: TypeList) -> Type: # Parameters literal (Z[[int, str, Whatever]]) if self.allow_param_spec_literals: params = self.analyze_callable_args(t) if params: ts, kinds, names = params # bind these types return Parameters(self.anal_array(ts), kinds, names, line=t.line, column=t.column) else: return AnyType(TypeOfAny.from_error) else: self.fail( 'Bracketed expression "[...]" is not valid as a type', t, code=codes.VALID_TYPE ) if len(t.items) == 1: self.note('Did you mean "List[...]"?', t) return AnyType(TypeOfAny.from_error) def visit_callable_argument(self, t: CallableArgument) -> Type: self.fail("Invalid type", t, code=codes.VALID_TYPE) return AnyType(TypeOfAny.from_error) def visit_instance(self, t: Instance) -> Type: return t def visit_type_alias_type(self, t: TypeAliasType) -> Type: # TODO: should we do something here?
return t def visit_type_var(self, t: TypeVarType) -> Type: return t def visit_param_spec(self, t: ParamSpecType) -> Type: return t def visit_type_var_tuple(self, t: TypeVarTupleType) -> Type: return t def visit_unpack_type(self, t: UnpackType) -> Type: if not self.allow_unpack: self.fail(message_registry.INVALID_UNPACK_POSITION, t.type, code=codes.VALID_TYPE) return AnyType(TypeOfAny.from_error) self.allow_type_var_tuple = True result = UnpackType(self.anal_type(t.type), from_star_syntax=t.from_star_syntax) self.allow_type_var_tuple = False return result def visit_parameters(self, t: Parameters) -> Type: raise NotImplementedError("ParamSpec literals cannot have unbound TypeVars") def visit_callable_type( self, t: CallableType, nested: bool = True, namespace: str = "" ) -> Type: # Every Callable can bind its own type variables, if they're not in the outer scope # TODO: attach namespace for nested free type variables (these appear in return type only). with self.tvar_scope_frame(namespace=namespace): unpacked_kwargs = t.unpack_kwargs if self.defining_alias: variables = t.variables else: variables, _ = self.bind_function_type_variables(t, t) type_guard = self.anal_type_guard(t.ret_type) type_is = self.anal_type_is(t.ret_type) arg_kinds = t.arg_kinds if len(arg_kinds) >= 2 and arg_kinds[-2] == ARG_STAR and arg_kinds[-1] == ARG_STAR2: arg_types = self.anal_array(t.arg_types[:-2], nested=nested) + [ self.anal_star_arg_type(t.arg_types[-2], ARG_STAR, nested=nested), self.anal_star_arg_type(t.arg_types[-1], ARG_STAR2, nested=nested), ] # If nested is True, it means we are analyzing a Callable[...] type, rather # than a function definition type. We need to "unpack" ** TypedDict annotation # here (for function definitions it is done in semanal). if nested and isinstance(arg_types[-1], UnpackType): # TODO: it would be better to avoid this get_proper_type() call. unpacked = get_proper_type(arg_types[-1].type) if isinstance(unpacked, TypedDictType): arg_types[-1] = unpacked unpacked_kwargs = True arg_types = self.check_unpacks_in_list(arg_types) else: star_index = None if ARG_STAR in arg_kinds: star_index = arg_kinds.index(ARG_STAR) star2_index = None if ARG_STAR2 in arg_kinds: star2_index = arg_kinds.index(ARG_STAR2) arg_types = [] for i, ut in enumerate(t.arg_types): at = self.anal_type( ut, nested=nested, allow_unpack=i in (star_index, star2_index) ) if nested and isinstance(at, UnpackType) and i == star_index: # TODO: it would be better to avoid this get_proper_type() call. p_at = get_proper_type(at.type) if isinstance(p_at, TypedDictType) and not at.from_star_syntax: # Automatically detect Unpack[Foo] in Callable as backwards # compatible syntax for **Foo, if Foo is a TypedDict. at = p_at arg_kinds[i] = ARG_STAR2 unpacked_kwargs = True arg_types.append(at) if nested: arg_types = self.check_unpacks_in_list(arg_types) # If there were multiple (invalid) unpacks, the arg types list will become shorter, # we need to trim the kinds/names as well to avoid crashes. 
arg_kinds = t.arg_kinds[: len(arg_types)] arg_names = t.arg_names[: len(arg_types)] ret = t.copy_modified( arg_types=arg_types, arg_kinds=arg_kinds, arg_names=arg_names, ret_type=self.anal_type(t.ret_type, nested=nested), # If the fallback isn't filled in yet, # its type will be the falsey FakeInfo fallback=(t.fallback if t.fallback.type else self.named_type("builtins.function")), variables=self.anal_var_defs(variables), type_guard=type_guard, type_is=type_is, unpack_kwargs=unpacked_kwargs, ) return ret def anal_type_guard(self, t: Type) -> Type | None: if isinstance(t, UnboundType): sym = self.lookup_qualified(t.name, t) if sym is not None and sym.node is not None: return self.anal_type_guard_arg(t, sym.node.fullname) # TODO: What if it's an Instance? Then use t.type.fullname? return None def anal_type_guard_arg(self, t: UnboundType, fullname: str) -> Type | None: if fullname in ("typing_extensions.TypeGuard", "typing.TypeGuard"): if len(t.args) != 1: self.fail( "TypeGuard must have exactly one type argument", t, code=codes.VALID_TYPE ) return AnyType(TypeOfAny.from_error) return self.anal_type(t.args[0]) return None def anal_type_is(self, t: Type) -> Type | None: if isinstance(t, UnboundType): sym = self.lookup_qualified(t.name, t) if sym is not None and sym.node is not None: return self.anal_type_is_arg(t, sym.node.fullname) # TODO: What if it's an Instance? Then use t.type.fullname? return None def anal_type_is_arg(self, t: UnboundType, fullname: str) -> Type | None: if fullname in ("typing_extensions.TypeIs", "typing.TypeIs"): if len(t.args) != 1: self.fail("TypeIs must have exactly one type argument", t, code=codes.VALID_TYPE) return AnyType(TypeOfAny.from_error) return self.anal_type(t.args[0]) return None def anal_star_arg_type(self, t: Type, kind: ArgKind, nested: bool) -> Type: """Analyze signature argument type for *args and **kwargs argument.""" if isinstance(t, UnboundType) and t.name and "." in t.name and not t.args: components = t.name.split(".") tvar_name = ".".join(components[:-1]) sym = self.lookup_qualified(tvar_name, t) if sym is not None and isinstance(sym.node, ParamSpecExpr): tvar_def = self.tvar_scope.get_binding(sym) if isinstance(tvar_def, ParamSpecType): if kind == ARG_STAR: make_paramspec = paramspec_args if components[-1] != "args": self.fail( f'Use "{tvar_name}.args" for variadic "*" parameter', t, code=codes.VALID_TYPE, ) elif kind == ARG_STAR2: make_paramspec = paramspec_kwargs if components[-1] != "kwargs": self.fail( f'Use "{tvar_name}.kwargs" for variadic "**" parameter', t, code=codes.VALID_TYPE, ) else: assert False, kind return make_paramspec( tvar_def.name, tvar_def.fullname, tvar_def.id, named_type_func=self.named_type, line=t.line, column=t.column, ) return self.anal_type(t, nested=nested, allow_unpack=True) def visit_overloaded(self, t: Overloaded) -> Type: # Overloaded types are manually constructed in semanal.py by analyzing the # AST and combining together the Callable types this visitor converts. # # So if we're ever asked to reanalyze an Overloaded type, we know it's # fine to just return it as-is. return t def visit_tuple_type(self, t: TupleType) -> Type: # Types such as (t1, t2, ...) only allowed in assignment statements. They'll # generate errors elsewhere, and Tuple[t1, t2, ...] must be used instead. 
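        # For example (illustrative): "x, y = f()  # type: (int, str)" is accepted,
        # while "def g() -> (int, str): ..." is not -- it must be Tuple[int, str].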
if t.implicit and not self.allow_tuple_literal: self.fail("Syntax error in type annotation", t, code=codes.SYNTAX) if len(t.items) == 0: self.note( "Suggestion: Use Tuple[()] instead of () for an empty tuple, or " "None for a function without a return value", t, code=codes.SYNTAX, ) elif len(t.items) == 1: self.note("Suggestion: Is there a spurious trailing comma?", t, code=codes.SYNTAX) else: self.note( "Suggestion: Use Tuple[T1, ..., Tn] instead of (T1, ..., Tn)", t, code=codes.SYNTAX, ) return AnyType(TypeOfAny.from_error) any_type = AnyType(TypeOfAny.special_form) # If the fallback isn't filled in yet, its type will be the falsey FakeInfo fallback = ( t.partial_fallback if t.partial_fallback.type else self.named_type("builtins.tuple", [any_type]) ) return TupleType(self.anal_array(t.items, allow_unpack=True), fallback, t.line) def visit_typeddict_type(self, t: TypedDictType) -> Type: req_keys = set() readonly_keys = set() items = {} for item_name, item_type in t.items.items(): # TODO: rework analyzed = self.anal_type(item_type, allow_typed_dict_special_forms=True) if isinstance(analyzed, RequiredType): if analyzed.required: req_keys.add(item_name) analyzed = analyzed.item else: # Keys are required by default. req_keys.add(item_name) if isinstance(analyzed, ReadOnlyType): readonly_keys.add(item_name) analyzed = analyzed.item items[item_name] = analyzed if t.fallback.type is MISSING_FALLBACK: # anonymous/inline TypedDict if INLINE_TYPEDDICT not in self.options.enable_incomplete_feature: self.fail( "Inline TypedDict is experimental," " must be enabled with --enable-incomplete-feature=InlineTypedDict", t, ) required_keys = req_keys fallback = self.named_type("typing._TypedDict") for typ in t.extra_items_from: analyzed = self.analyze_type(typ) p_analyzed = get_proper_type(analyzed) if not isinstance(p_analyzed, TypedDictType): if not isinstance(p_analyzed, (AnyType, PlaceholderType)): self.fail("Can only merge-in other TypedDict", t, code=codes.VALID_TYPE) continue for sub_item_name, sub_item_type in p_analyzed.items.items(): if sub_item_name in items: self.fail(TYPEDDICT_OVERRIDE_MERGE.format(sub_item_name), t) continue items[sub_item_name] = sub_item_type if sub_item_name in p_analyzed.required_keys: req_keys.add(sub_item_name) if sub_item_name in p_analyzed.readonly_keys: readonly_keys.add(sub_item_name) else: required_keys = t.required_keys fallback = t.fallback return TypedDictType(items, required_keys, readonly_keys, fallback, t.line, t.column) def visit_raw_expression_type(self, t: RawExpressionType) -> Type: # We should never see a bare Literal. We synthesize these raw literals # in the earlier stages of semantic analysis, but those # "fake literals" should always be wrapped in an UnboundType # corresponding to 'Literal'. # # Note: if at some point in the distant future, we decide to # make signatures like "foo(x: 20) -> None" legal, we can change # this method so it generates and returns an actual LiteralType # instead. if self.report_invalid_types: if t.base_type_name in ("builtins.int", "builtins.bool"): # The only time it makes sense to use an int or bool is inside of # a literal type. msg = f"Invalid type: try using Literal[{repr(t.literal_value)}] instead?" elif t.base_type_name in ("builtins.float", "builtins.complex"): # We special-case warnings for floats and complex numbers. msg = f"Invalid type: {t.simple_name()} literals cannot be used as a type" else: # And in all other cases, we default to a generic error message. 
# Note: the reason why we use a generic error message for strings # but not ints or bools is because whenever we see an out-of-place # string, it's unclear if the user meant to construct a literal type # or just misspelled a regular type. So we avoid guessing. msg = "Invalid type comment or annotation" self.fail(msg, t, code=codes.VALID_TYPE) if t.note is not None: self.note(t.note, t, code=codes.VALID_TYPE) return AnyType(TypeOfAny.from_error, line=t.line, column=t.column) def visit_literal_type(self, t: LiteralType) -> Type: return t def visit_union_type(self, t: UnionType) -> Type: if ( t.uses_pep604_syntax is True and t.is_evaluated is True and not self.always_allow_new_syntax and not self.options.python_version >= (3, 10) ): self.fail("X | Y syntax for unions requires Python 3.10", t, code=codes.SYNTAX) return UnionType(self.anal_array(t.items), t.line, uses_pep604_syntax=t.uses_pep604_syntax) def visit_partial_type(self, t: PartialType) -> Type: assert False, "Internal error: Unexpected partial type" def visit_ellipsis_type(self, t: EllipsisType) -> Type: if self.allow_ellipsis or self.allow_param_spec_literals: any_type = AnyType(TypeOfAny.explicit) return Parameters( [any_type, any_type], [ARG_STAR, ARG_STAR2], [None, None], is_ellipsis_args=True ) else: self.fail('Unexpected "..."', t) return AnyType(TypeOfAny.from_error) def visit_type_type(self, t: TypeType) -> Type: return TypeType.make_normalized(self.anal_type(t.item), line=t.line) def visit_placeholder_type(self, t: PlaceholderType) -> Type: n = ( None # No dot in fullname indicates we are at function scope, and recursive # types are not supported there anyway, so we just give up. if not t.fullname or "." not in t.fullname else self.api.lookup_fully_qualified(t.fullname) ) if not n or isinstance(n.node, PlaceholderNode): self.api.defer() # Still incomplete return t else: # TODO: Handle non-TypeInfo assert isinstance(n.node, TypeInfo) return self.analyze_type_with_type_info(n.node, t.args, t, False) def analyze_callable_args_for_paramspec( self, callable_args: Type, ret_type: Type, fallback: Instance ) -> CallableType | None: """Construct a 'Callable[P, RET]', where P is ParamSpec, return None if we cannot.""" if not isinstance(callable_args, UnboundType): return None sym = self.lookup_qualified(callable_args.name, callable_args) if sym is None: return None tvar_def = self.tvar_scope.get_binding(sym) if not isinstance(tvar_def, ParamSpecType): if ( tvar_def is None and self.allow_unbound_tvars and isinstance(sym.node, ParamSpecExpr) ): # We are analyzing this type in runtime context (e.g. as type application). # If it is not valid as a type in this position an error will be given later. 
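                # A sketch of such a context (illustrative): evaluating the expression
                # Callable[P, int] at runtime, where P is a ParamSpec not bound here.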
return callable_with_ellipsis( AnyType(TypeOfAny.explicit), ret_type=ret_type, fallback=fallback ) return None elif ( self.defining_alias and self.not_declared_in_type_params(tvar_def.name) and tvar_def not in self.allowed_alias_tvars ): if self.python_3_12_type_alias: msg = message_registry.TYPE_PARAMETERS_SHOULD_BE_DECLARED.format( f'"{tvar_def.name}"' ) else: msg = f'ParamSpec "{tvar_def.name}" is not included in type_params' self.fail(msg, callable_args, code=codes.VALID_TYPE) return callable_with_ellipsis( AnyType(TypeOfAny.special_form), ret_type=ret_type, fallback=fallback ) return CallableType( [ paramspec_args( tvar_def.name, tvar_def.fullname, tvar_def.id, named_type_func=self.named_type ), paramspec_kwargs( tvar_def.name, tvar_def.fullname, tvar_def.id, named_type_func=self.named_type ), ], [nodes.ARG_STAR, nodes.ARG_STAR2], [None, None], ret_type=ret_type, fallback=fallback, ) def analyze_callable_args_for_concatenate( self, callable_args: Type, ret_type: Type, fallback: Instance ) -> CallableType | AnyType | None: """Construct a 'Callable[C, RET]', where C is Concatenate[..., P], returning None if we cannot. """ if not isinstance(callable_args, UnboundType): return None sym = self.lookup_qualified(callable_args.name, callable_args) if sym is None: return None if sym.node is None: return None if sym.node.fullname not in ("typing_extensions.Concatenate", "typing.Concatenate"): return None tvar_def = self.anal_type(callable_args, allow_param_spec=True) if not isinstance(tvar_def, (ParamSpecType, Parameters)): if self.allow_unbound_tvars and isinstance(tvar_def, UnboundType): sym = self.lookup_qualified(tvar_def.name, callable_args) if sym is not None and isinstance(sym.node, ParamSpecExpr): # We are analyzing this type in runtime context (e.g. as type application). # If it is not valid as a type in this position an error will be given later. return callable_with_ellipsis( AnyType(TypeOfAny.explicit), ret_type=ret_type, fallback=fallback ) # Error was already given, so prevent further errors. return AnyType(TypeOfAny.from_error) if isinstance(tvar_def, Parameters): # This comes from Concatenate[int, ...] return CallableType( arg_types=tvar_def.arg_types, arg_names=tvar_def.arg_names, arg_kinds=tvar_def.arg_kinds, ret_type=ret_type, fallback=fallback, from_concatenate=True, ) # ick, CallableType should take ParamSpecType prefix = tvar_def.prefix # we don't set the prefix here as generic arguments will get updated at some point # in the future. CallableType.param_spec() accounts for this. return CallableType( [ *prefix.arg_types, paramspec_args( tvar_def.name, tvar_def.fullname, tvar_def.id, named_type_func=self.named_type ), paramspec_kwargs( tvar_def.name, tvar_def.fullname, tvar_def.id, named_type_func=self.named_type ), ], [*prefix.arg_kinds, nodes.ARG_STAR, nodes.ARG_STAR2], [*prefix.arg_names, None, None], ret_type=ret_type, fallback=fallback, from_concatenate=True, ) def analyze_callable_type(self, t: UnboundType) -> Type: fallback = self.named_type("builtins.function") if len(t.args) == 0: # Callable (bare). Treat as Callable[..., Any]. 
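            # For example, "f: Callable" is accepted and means Callable[..., Any];
            # the omitted Any below may still be flagged under --disallow-any-generics.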
any_type = self.get_omitted_any(t) ret = callable_with_ellipsis(any_type, any_type, fallback) elif len(t.args) == 2: callable_args = t.args[0] ret_type = t.args[1] if isinstance(callable_args, TypeList): # Callable[[ARG, ...], RET] (ordinary callable type) analyzed_args = self.analyze_callable_args(callable_args) if analyzed_args is None: return AnyType(TypeOfAny.from_error) args, kinds, names = analyzed_args ret = CallableType(args, kinds, names, ret_type=ret_type, fallback=fallback) elif isinstance(callable_args, EllipsisType): # Callable[..., RET] (with literal ellipsis; accept arbitrary arguments) ret = callable_with_ellipsis( AnyType(TypeOfAny.explicit), ret_type=ret_type, fallback=fallback ) else: # Callable[P, RET] (where P is ParamSpec) with self.tvar_scope_frame(namespace=""): # Temporarily bind ParamSpecs to allow code like this: # my_fun: Callable[Q, Foo[Q]] # We usually do this later in visit_callable_type(), but the analysis # below happens at very early stage. variables = [] for name, tvar_expr in self.find_type_var_likes(callable_args): variables.append(self.tvar_scope.bind_new(name, tvar_expr)) maybe_ret = self.analyze_callable_args_for_paramspec( callable_args, ret_type, fallback ) or self.analyze_callable_args_for_concatenate( callable_args, ret_type, fallback ) if isinstance(maybe_ret, CallableType): maybe_ret = maybe_ret.copy_modified(variables=variables) if maybe_ret is None: # Callable[?, RET] (where ? is something invalid) self.fail( "The first argument to Callable must be a " 'list of types, parameter specification, or "..."', t, code=codes.VALID_TYPE, ) self.note( "See https://mypy.readthedocs.io/en/stable/kinds_of_types.html#callable-types-and-lambdas", t, ) return AnyType(TypeOfAny.from_error) elif isinstance(maybe_ret, AnyType): return maybe_ret ret = maybe_ret else: if self.options.disallow_any_generics: self.fail('Please use "Callable[[<parameters>], <return type>]"', t) else: self.fail('Please use "Callable[[<parameters>], <return type>]" or "Callable"', t) return AnyType(TypeOfAny.from_error) assert isinstance(ret, CallableType) return ret.accept(self) def refers_to_full_names(self, arg: UnboundType, names: Sequence[str]) -> bool: sym = self.lookup_qualified(arg.name, arg) if sym is not None: if sym.fullname in names: return True return False def analyze_callable_args( self, arglist: TypeList ) -> tuple[list[Type], list[ArgKind], list[str | None]] | None: args: list[Type] = [] kinds: list[ArgKind] = [] names: list[str | None] = [] seen_unpack = False unpack_types: list[Type] = [] invalid_unpacks: list[Type] = [] second_unpack_last = False for i, arg in enumerate(arglist.items): if isinstance(arg, CallableArgument): args.append(arg.typ) names.append(arg.name) if arg.constructor is None: return None found = self.lookup_qualified(arg.constructor, arg) if found is None: # Looking it up already put an error message in return None elif found.fullname not in ARG_KINDS_BY_CONSTRUCTOR: self.fail(f'Invalid argument constructor "{found.fullname}"', arg) return None else: assert found.fullname is not None kind = ARG_KINDS_BY_CONSTRUCTOR[found.fullname] kinds.append(kind) if arg.name is not None and kind.is_star(): self.fail(f"{arg.constructor} arguments should not have names", arg) return None elif ( isinstance(arg, UnboundType) and self.refers_to_full_names(arg, ("typing_extensions.Unpack", "typing.Unpack")) or isinstance(arg, UnpackType) ): if seen_unpack: # Multiple unpacks, preserve them, so we can give an error later. 
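                    # e.g. (illustrative) Callable[[Unpack[Ts], Unpack[Us]], int]: the extra
                    # unpack goes into invalid_unpacks; a trailing one may still be accepted
                    # below when it unpacks a TypedDict (the **kwargs case).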
if i == len(arglist.items) - 1 and not invalid_unpacks: # Special case: if there are just two unpacks, and the second one appears # as last type argument, it can be still valid, if the second unpacked type # is a TypedDict. This should be checked by the caller. second_unpack_last = True invalid_unpacks.append(arg) continue seen_unpack = True unpack_types.append(arg) else: if seen_unpack: unpack_types.append(arg) else: args.append(arg) kinds.append(ARG_POS) names.append(None) if seen_unpack: if len(unpack_types) == 1: args.append(unpack_types[0]) else: first = unpack_types[0] if isinstance(first, UnpackType): # UnpackType doesn't have its own line/column numbers, # so use the unpacked type for error messages. first = first.type args.append( UnpackType(self.tuple_type(unpack_types, line=first.line, column=first.column)) ) kinds.append(ARG_STAR) names.append(None) for arg in invalid_unpacks: args.append(arg) kinds.append(ARG_STAR2 if second_unpack_last else ARG_STAR) names.append(None) # Note that arglist below is only used for error context. check_arg_names(names, [arglist] * len(args), self.fail, "Callable") check_arg_kinds(kinds, [arglist] * len(args), self.fail) return args, kinds, names def analyze_literal_type(self, t: UnboundType) -> Type: if len(t.args) == 0: self.fail("Literal[...] must have at least one parameter", t, code=codes.VALID_TYPE) return AnyType(TypeOfAny.from_error) output: list[Type] = [] for i, arg in enumerate(t.args): analyzed_types = self.analyze_literal_param(i + 1, arg, t) if analyzed_types is None: return AnyType(TypeOfAny.from_error) else: output.extend(analyzed_types) return UnionType.make_union(output, line=t.line) def analyze_literal_param(self, idx: int, arg: Type, ctx: Context) -> list[Type] | None: # This UnboundType was originally defined as a string. if ( isinstance(arg, ProperType) and isinstance(arg, (UnboundType, UnionType)) and arg.original_str_expr is not None ): assert arg.original_str_fallback is not None return [ LiteralType( value=arg.original_str_expr, fallback=self.named_type(arg.original_str_fallback), line=arg.line, column=arg.column, ) ] # If arg is an UnboundType that was *not* originally defined as # a string, try expanding it in case it's a type alias or something. if isinstance(arg, UnboundType): self.nesting_level += 1 try: arg = self.visit_unbound_type(arg, defining_literal=True) finally: self.nesting_level -= 1 # Literal[...] cannot contain Any. Give up and add an error message # (if we haven't already). arg = get_proper_type(arg) if isinstance(arg, AnyType): # Note: We can encounter Literals containing 'Any' under three circumstances: # # 1. If the user attempts use an explicit Any as a parameter # 2. If the user is trying to use an enum value imported from a module with # no type hints, giving it an implicit type of 'Any' # 3. If there's some other underlying problem with the parameter. # # We report an error in only the first two cases. In the third case, we assume # some other region of the code has already reported a more relevant error. # # TODO: Once we start adding support for enums, make sure we report a custom # error for case 2 as well. if arg.type_of_any not in (TypeOfAny.from_error, TypeOfAny.special_form): self.fail( f'Parameter {idx} of Literal[...] cannot be of type "Any"', ctx, code=codes.VALID_TYPE, ) return None elif isinstance(arg, RawExpressionType): # A raw literal. Convert it directly into a literal if we can. 
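        # e.g. Literal[42] and Literal["err"] convert directly; Literal[3.14] cannot,
        # since float literals carry no literal_value and are rejected just below.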
if arg.literal_value is None: name = arg.simple_name() if name in ("float", "complex"): msg = f'Parameter {idx} of Literal[...] cannot be of type "{name}"' else: msg = "Invalid type: Literal[...] cannot contain arbitrary expressions" self.fail(msg, ctx, code=codes.VALID_TYPE) # Note: we deliberately ignore arg.note here: the extra info might normally be # helpful, but it generally won't make sense in the context of a Literal[...]. return None # Remap bytes and unicode into the appropriate type for the correct Python version fallback = self.named_type(arg.base_type_name) assert isinstance(fallback, Instance) return [LiteralType(arg.literal_value, fallback, line=arg.line, column=arg.column)] elif isinstance(arg, (NoneType, LiteralType)): # Types that we can just add directly to the literal/potential union of literals. return [arg] elif isinstance(arg, Instance) and arg.last_known_value is not None: # Types generated from declarations like "var: Final = 4". return [arg.last_known_value] elif isinstance(arg, UnionType): out = [] for union_arg in arg.items: union_result = self.analyze_literal_param(idx, union_arg, ctx) if union_result is None: return None out.extend(union_result) return out else: self.fail(f"Parameter {idx} of Literal[...] is invalid", ctx, code=codes.VALID_TYPE) return None def analyze_type(self, t: Type) -> Type: return t.accept(self) def fail(self, msg: str, ctx: Context, *, code: ErrorCode | None = None) -> None: self.fail_func(msg, ctx, code=code) def note(self, msg: str, ctx: Context, *, code: ErrorCode | None = None) -> None: self.note_func(msg, ctx, code=code) @contextmanager def tvar_scope_frame(self, namespace: str) -> Iterator[None]: old_scope = self.tvar_scope self.tvar_scope = self.tvar_scope.method_frame(namespace) yield self.tvar_scope = old_scope def find_type_var_likes(self, t: Type) -> TypeVarLikeList: visitor = FindTypeVarVisitor(self.api, self.tvar_scope) t.accept(visitor) return visitor.type_var_likes def infer_type_variables( self, type: CallableType ) -> tuple[list[tuple[str, TypeVarLikeExpr]], bool]: """Infer type variables from a callable. Return tuple with these items: - list of unique type variables referred to in a callable - whether there is a reference to the Self type """ visitor = FindTypeVarVisitor(self.api, self.tvar_scope) for arg in type.arg_types: arg.accept(visitor) # When finding type variables in the return type of a function, don't # look inside Callable types. Type variables only appearing in # functions in the return type belong to those functions, not the # function we're currently analyzing. visitor.include_callables = False type.ret_type.accept(visitor) return visitor.type_var_likes, visitor.has_self_type def bind_function_type_variables( self, fun_type: CallableType, defn: Context ) -> tuple[Sequence[TypeVarLikeType], bool]: """Find the type variables of the function type and bind them in our tvar_scope""" has_self_type = False if fun_type.variables: defs = [] for var in fun_type.variables: if self.api.type and self.api.type.self_type and var == self.api.type.self_type: has_self_type = True continue var_node = self.lookup_qualified(var.name, defn) assert var_node, "Binding for function type variable not found within function" var_expr = var_node.node assert isinstance(var_expr, TypeVarLikeExpr) binding = self.tvar_scope.bind_new(var.name, var_expr) defs.append(binding) return defs, has_self_type typevars, has_self_type = self.infer_type_variables(fun_type) # Do not define a new type variable if already defined in scope. 
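        # Illustrative case: inside "class C(Generic[T]):", a method "def f(self, x: T) -> T"
        # must reuse the class-scoped T rather than bind a fresh copy of it here.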
typevars = [ (name, tvar) for name, tvar in typevars if not self.is_defined_type_var(name, defn) ] defs = [] for name, tvar in typevars: if not self.tvar_scope.allow_binding(tvar.fullname): self.fail( f'Type variable "{name}" is bound by an outer class', defn, code=codes.VALID_TYPE, ) binding = self.tvar_scope.bind_new(name, tvar) defs.append(binding) return defs, has_self_type def is_defined_type_var(self, tvar: str, context: Context) -> bool: tvar_node = self.lookup_qualified(tvar, context) if not tvar_node: return False return self.tvar_scope.get_binding(tvar_node) is not None def anal_array( self, a: Iterable[Type], nested: bool = True, *, allow_param_spec: bool = False, allow_param_spec_literals: bool = False, allow_unpack: bool = False, ) -> list[Type]: old_allow_param_spec_literals = self.allow_param_spec_literals self.allow_param_spec_literals = allow_param_spec_literals res: list[Type] = [] for t in a: res.append( self.anal_type( t, nested, allow_param_spec=allow_param_spec, allow_unpack=allow_unpack ) ) self.allow_param_spec_literals = old_allow_param_spec_literals return self.check_unpacks_in_list(res) def anal_type( self, t: Type, nested: bool = True, *, allow_param_spec: bool = False, allow_unpack: bool = False, allow_ellipsis: bool = False, allow_typed_dict_special_forms: bool = False, ) -> Type: if nested: self.nesting_level += 1 old_allow_typed_dict_special_forms = self.allow_typed_dict_special_forms self.allow_typed_dict_special_forms = allow_typed_dict_special_forms old_allow_ellipsis = self.allow_ellipsis self.allow_ellipsis = allow_ellipsis old_allow_unpack = self.allow_unpack self.allow_unpack = allow_unpack try: analyzed = t.accept(self) finally: if nested: self.nesting_level -= 1 self.allow_typed_dict_special_forms = old_allow_typed_dict_special_forms self.allow_ellipsis = old_allow_ellipsis self.allow_unpack = old_allow_unpack if ( not allow_param_spec and isinstance(analyzed, ParamSpecType) and analyzed.flavor == ParamSpecFlavor.BARE ): if analyzed.prefix.arg_types: self.fail("Invalid location for Concatenate", t, code=codes.VALID_TYPE) self.note("You can use Concatenate as the first argument to Callable", t) analyzed = AnyType(TypeOfAny.from_error) else: self.fail( INVALID_PARAM_SPEC_LOCATION.format(format_type(analyzed, self.options)), t, code=codes.VALID_TYPE, ) self.note( INVALID_PARAM_SPEC_LOCATION_NOTE.format(analyzed.name), t, code=codes.VALID_TYPE, ) analyzed = AnyType(TypeOfAny.from_error) return analyzed def anal_var_def(self, var_def: TypeVarLikeType) -> TypeVarLikeType: if isinstance(var_def, TypeVarType): return TypeVarType( name=var_def.name, fullname=var_def.fullname, id=var_def.id, values=self.anal_array(var_def.values), upper_bound=var_def.upper_bound.accept(self), default=var_def.default.accept(self), variance=var_def.variance, line=var_def.line, column=var_def.column, ) else: return var_def def anal_var_defs(self, var_defs: Sequence[TypeVarLikeType]) -> list[TypeVarLikeType]: return [self.anal_var_def(vd) for vd in var_defs] def named_type( self, fully_qualified_name: str, args: list[Type] | None = None, line: int = -1, column: int = -1, ) -> Instance: node = self.lookup_fully_qualified(fully_qualified_name) assert isinstance(node.node, TypeInfo) any_type = AnyType(TypeOfAny.special_form) if args is not None: args = self.check_unpacks_in_list(args) return Instance( node.node, args or [any_type] * len(node.node.defn.type_vars), line=line, column=column ) def check_unpacks_in_list(self, items: list[Type]) -> list[Type]: new_items: list[Type] = [] 
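        # Rough sketch of what this enforces: Tuple[int, Unpack[Ts], Unpack[Us]] keeps
        # only the first variadic unpack and reports the error below for the rest.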
        num_unpacks = 0
        final_unpack = None
        for item in items:
            # TODO: handle forward references here, they appear as Unpack[Any].
            if isinstance(item, UnpackType) and not isinstance(
                get_proper_type(item.type), TupleType
            ):
                if not num_unpacks:
                    new_items.append(item)
                num_unpacks += 1
                final_unpack = item
            else:
                new_items.append(item)
        if num_unpacks > 1:
            assert final_unpack is not None
            self.fail("More than one Unpack in a type is not allowed", final_unpack)
        return new_items

    def tuple_type(self, items: list[Type], line: int, column: int) -> TupleType:
        any_type = AnyType(TypeOfAny.special_form)
        return TupleType(
            items, fallback=self.named_type("builtins.tuple", [any_type]), line=line, column=column
        )


TypeVarLikeList = List[Tuple[str, TypeVarLikeExpr]]


class MsgCallback(Protocol):
    def __call__(self, __msg: str, __ctx: Context, *, code: ErrorCode | None = None) -> None: ...


def get_omitted_any(
    disallow_any: bool,
    fail: MsgCallback,
    note: MsgCallback,
    orig_type: Type,
    options: Options,
    fullname: str | None = None,
    unexpanded_type: Type | None = None,
) -> AnyType:
    if disallow_any:
        nongen_builtins = get_nongen_builtins(options.python_version)
        if fullname in nongen_builtins:
            typ = orig_type
            # We use a dedicated error message for builtin generics (as the most common case).
            alternative = nongen_builtins[fullname]
            fail(
                message_registry.IMPLICIT_GENERIC_ANY_BUILTIN.format(alternative),
                typ,
                code=codes.TYPE_ARG,
            )
        else:
            typ = unexpanded_type or orig_type
            type_str = typ.name if isinstance(typ, UnboundType) else format_type_bare(typ, options)

            fail(
                message_registry.BARE_GENERIC.format(quote_type_string(type_str)),
                typ,
                code=codes.TYPE_ARG,
            )
            base_type = get_proper_type(orig_type)
            base_fullname = (
                base_type.type.fullname if isinstance(base_type, Instance) else fullname
            )
            # Ideally, we'd check whether the type is quoted or `from __future__ import annotations`
            # is set before issuing this note
            if (
                options.python_version < (3, 9)
                and base_fullname in GENERIC_STUB_NOT_AT_RUNTIME_TYPES
            ):
                # Recommend `from __future__ import annotations` or to put type in quotes
                # (string literal escaping) for classes not generic at runtime
                note(
                    "Subscripting classes that are not generic at runtime may require "
                    "escaping, see https://mypy.readthedocs.io/en/stable/runtime_troubles.html"
                    "#not-generic-runtime",
                    typ,
                    code=codes.TYPE_ARG,
                )

        any_type = AnyType(TypeOfAny.from_error, line=typ.line, column=typ.column)
    else:
        any_type = AnyType(
            TypeOfAny.from_omitted_generics, line=orig_type.line, column=orig_type.column
        )
    return any_type


def fix_type_var_tuple_argument(t: Instance) -> None:
    if t.type.has_type_var_tuple_type:
        args = list(t.args)
        assert t.type.type_var_tuple_prefix is not None
        tvt = t.type.defn.type_vars[t.type.type_var_tuple_prefix]
        assert isinstance(tvt, TypeVarTupleType)
        args[t.type.type_var_tuple_prefix] = UnpackType(
            Instance(tvt.tuple_fallback.type, [args[t.type.type_var_tuple_prefix]])
        )
        t.args = tuple(args)


def fix_instance(
    t: Instance,
    fail: MsgCallback,
    note: MsgCallback,
    disallow_any: bool,
    options: Options,
    use_generic_error: bool = False,
    unexpanded_type: Type | None = None,
) -> None:
    """Fix a malformed instance by replacing all type arguments with TypeVar default or Any.

    Also emit a suitable error if this is not due to implicit Any's.
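
    For example (illustrative): given "class C(Generic[T, S]): ...", a bare
    annotation "x: C" is fixed up to C[Any, Any], or to the declared TypeVar
    defaults when those exist.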
""" arg_count = len(t.args) min_tv_count = sum(not tv.has_default() for tv in t.type.defn.type_vars) max_tv_count = len(t.type.type_vars) if arg_count < min_tv_count or arg_count > max_tv_count: # Don't use existing args if arg_count doesn't match if arg_count > max_tv_count: # Already wrong arg count error, don't emit missing type parameters error as well. disallow_any = False t.args = () arg_count = 0 args: list[Type] = [*(t.args[:max_tv_count])] any_type: AnyType | None = None env: dict[TypeVarId, Type] = {} for tv, arg in itertools.zip_longest(t.type.defn.type_vars, t.args, fillvalue=None): if tv is None: continue if arg is None: if tv.has_default(): arg = tv.default else: if any_type is None: fullname = None if use_generic_error else t.type.fullname any_type = get_omitted_any( disallow_any, fail, note, t, options, fullname, unexpanded_type ) arg = any_type args.append(arg) env[tv.id] = arg t.args = tuple(args) fix_type_var_tuple_argument(t) if not t.type.has_type_var_tuple_type: with state.strict_optional_set(options.strict_optional): fixed = expand_type(t, env) assert isinstance(fixed, Instance) t.args = fixed.args def instantiate_type_alias( node: TypeAlias, args: list[Type], fail: MsgCallback, no_args: bool, ctx: Context, options: Options, *, unexpanded_type: Type | None = None, disallow_any: bool = False, use_standard_error: bool = False, empty_tuple_index: bool = False, ) -> Type: """Create an instance of a (generic) type alias from alias node and type arguments. We are following the rules outlined in TypeAlias docstring. Here: node: type alias node (definition) args: type arguments (types to be substituted in place of type variables when expanding the alias) fail: error reporter callback no_args: whether original definition used a bare generic `A = List` ctx: context where expansion happens unexpanded_type, disallow_any, use_standard_error: used to customize error messages """ # Type aliases are special, since they can be expanded during semantic analysis, # so we need to normalize them as soon as possible. # TODO: can this cause an infinite recursion? args = flatten_nested_tuples(args) if any(unknown_unpack(a) for a in args): # This type is not ready to be validated, because of unknown total count. # Note that we keep the kind of Any for consistency. return set_any_tvars(node, [], ctx.line, ctx.column, options, special_form=True) max_tv_count = len(node.alias_tvars) act_len = len(args) if ( max_tv_count > 0 and act_len == 0 and not (empty_tuple_index and node.tvar_tuple_index is not None) ): # Interpret bare Alias same as normal generic, i.e., Alias[Any, Any, ...] return set_any_tvars( node, args, ctx.line, ctx.column, options, disallow_any=disallow_any, fail=fail, unexpanded_type=unexpanded_type, ) if max_tv_count == 0 and act_len == 0: if no_args: assert isinstance(node.target, Instance) # type: ignore[misc] # Note: this is the only case where we use an eager expansion. See more info about # no_args aliases like L = List in the docstring for TypeAlias class. 
return Instance(node.target.type, [], line=ctx.line, column=ctx.column) return TypeAliasType(node, [], line=ctx.line, column=ctx.column) if ( max_tv_count == 0 and act_len > 0 and isinstance(node.target, Instance) # type: ignore[misc] and no_args ): tp = Instance(node.target.type, args) tp.line = ctx.line tp.column = ctx.column return tp if node.tvar_tuple_index is None: if any(isinstance(a, UnpackType) for a in args): # A variadic unpack in fixed size alias (fixed unpacks must be flattened by the caller) fail(message_registry.INVALID_UNPACK_POSITION, ctx, code=codes.VALID_TYPE) return set_any_tvars(node, [], ctx.line, ctx.column, options, from_error=True) min_tv_count = sum(not tv.has_default() for tv in node.alias_tvars) fill_typevars = act_len != max_tv_count correct = min_tv_count <= act_len <= max_tv_count else: min_tv_count = sum( not tv.has_default() and not isinstance(tv, TypeVarTupleType) for tv in node.alias_tvars ) correct = act_len >= min_tv_count for a in args: if isinstance(a, UnpackType): unpacked = get_proper_type(a.type) if isinstance(unpacked, Instance) and unpacked.type.fullname == "builtins.tuple": # Variadic tuple is always correct. correct = True fill_typevars = not correct if fill_typevars: if not correct: if use_standard_error: # This is used if type alias is an internal representation of another type, # for example a generic TypedDict or NamedTuple. msg = wrong_type_arg_count(max_tv_count, max_tv_count, str(act_len), node.name) else: if node.tvar_tuple_index is not None: msg = ( "Bad number of arguments for type alias," f" expected at least {min_tv_count}, given {act_len}" ) elif min_tv_count != max_tv_count: msg = ( "Bad number of arguments for type alias," f" expected between {min_tv_count} and {max_tv_count}, given {act_len}" ) else: msg = ( "Bad number of arguments for type alias," f" expected {min_tv_count}, given {act_len}" ) fail(msg, ctx, code=codes.TYPE_ARG) args = [] return set_any_tvars(node, args, ctx.line, ctx.column, options, from_error=True) elif node.tvar_tuple_index is not None: # We also need to check if we are not performing a type variable tuple split. unpack = find_unpack_in_list(args) if unpack is not None: unpack_arg = args[unpack] assert isinstance(unpack_arg, UnpackType) if isinstance(unpack_arg.type, TypeVarTupleType): exp_prefix = node.tvar_tuple_index act_prefix = unpack exp_suffix = len(node.alias_tvars) - node.tvar_tuple_index - 1 act_suffix = len(args) - unpack - 1 if act_prefix < exp_prefix or act_suffix < exp_suffix: fail("TypeVarTuple cannot be split", ctx, code=codes.TYPE_ARG) return set_any_tvars(node, [], ctx.line, ctx.column, options, from_error=True) # TODO: we need to check args validity w.r.t alias.alias_tvars. # Otherwise invalid instantiations will be allowed in runtime context. # Note: in type context, these will be still caught by semanal_typeargs. typ = TypeAliasType(node, args, ctx.line, ctx.column) assert typ.alias is not None # HACK: Implement FlexibleAlias[T, typ] by expanding it to typ here. 
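    # Roughly: an alias "A = FlexibleAlias[T, int]" makes A[<anything>] behave
    # exactly like int during type checking, which is what the expansion below does.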
if ( isinstance(typ.alias.target, Instance) # type: ignore[misc] and typ.alias.target.type.fullname == "mypy_extensions.FlexibleAlias" ): exp = get_proper_type(typ) assert isinstance(exp, Instance) return exp.args[-1] return typ def set_any_tvars( node: TypeAlias, args: list[Type], newline: int, newcolumn: int, options: Options, *, from_error: bool = False, disallow_any: bool = False, special_form: bool = False, fail: MsgCallback | None = None, unexpanded_type: Type | None = None, ) -> TypeAliasType: if from_error or disallow_any: type_of_any = TypeOfAny.from_error elif special_form: type_of_any = TypeOfAny.special_form else: type_of_any = TypeOfAny.from_omitted_generics any_type = AnyType(type_of_any, line=newline, column=newcolumn) env: dict[TypeVarId, Type] = {} used_any_type = False has_type_var_tuple_type = False for tv, arg in itertools.zip_longest(node.alias_tvars, args, fillvalue=None): if tv is None: continue if arg is None: if tv.has_default(): arg = tv.default else: arg = any_type used_any_type = True if isinstance(tv, TypeVarTupleType): # TODO Handle TypeVarTuple defaults has_type_var_tuple_type = True arg = UnpackType(Instance(tv.tuple_fallback.type, [any_type])) args.append(arg) env[tv.id] = arg t = TypeAliasType(node, args, newline, newcolumn) if not has_type_var_tuple_type: fixed = expand_type(t, env) assert isinstance(fixed, TypeAliasType) t.args = fixed.args if used_any_type and disallow_any and node.alias_tvars: assert fail is not None if unexpanded_type: type_str = ( unexpanded_type.name if isinstance(unexpanded_type, UnboundType) else format_type_bare(unexpanded_type, options) ) else: type_str = node.name fail( message_registry.BARE_GENERIC.format(quote_type_string(type_str)), Context(newline, newcolumn), code=codes.TYPE_ARG, ) return t def flatten_tvars(lists: list[list[T]]) -> list[T]: result: list[T] = [] for lst in lists: for item in lst: if item not in result: result.append(item) return result class DivergingAliasDetector(TrivialSyntheticTypeTranslator): """See docstring of detect_diverging_alias() for details.""" # TODO: this doesn't really need to be a translator, but we don't have a trivial visitor. def __init__( self, seen_nodes: set[TypeAlias], lookup: Callable[[str, Context], SymbolTableNode | None], scope: TypeVarLikeScope, ) -> None: super().__init__() self.seen_nodes = seen_nodes self.lookup = lookup self.scope = scope self.diverging = False def visit_type_alias_type(self, t: TypeAliasType) -> Type: assert t.alias is not None, f"Unfixed type alias {t.type_ref}" if t.alias in self.seen_nodes: for arg in t.args: if not ( isinstance(arg, TypeVarLikeType) or isinstance(arg, UnpackType) and isinstance(arg.type, TypeVarLikeType) ) and has_type_vars(arg): self.diverging = True return t # All clear for this expansion chain. return t new_nodes = self.seen_nodes | {t.alias} visitor = DivergingAliasDetector(new_nodes, self.lookup, self.scope) _ = get_proper_type(t).accept(visitor) if visitor.diverging: self.diverging = True return t def detect_diverging_alias( node: TypeAlias, target: Type, lookup: Callable[[str, Context], SymbolTableNode | None], scope: TypeVarLikeScope, ) -> bool: """This detects type aliases that will diverge during type checking. For example F = Something[..., F[List[T]]]. At each expansion step this will produce *new* type aliases: e.g. F[List[int]], F[List[List[int]]], etc. So we can't detect recursion. It is a known problem in the literature, recursive aliases and generic types don't always go well together. 
    It looks like there is no known systematic solution yet.

    # TODO: should we handle such aliases using type_recursion counter and some large limit?
    They may be handy in rare cases, e.g. to express a union of non-mixed nested lists:
    Nested = Union[T, Nested[List[T]]] ~> Union[T, List[T], List[List[T]], ...]
    """
    visitor = DivergingAliasDetector({node}, lookup, scope)
    _ = target.accept(visitor)
    return visitor.diverging


def check_for_explicit_any(
    typ: Type | None,
    options: Options,
    is_typeshed_stub: bool,
    msg: MessageBuilder,
    context: Context,
) -> None:
    if options.disallow_any_explicit and not is_typeshed_stub and typ and has_explicit_any(typ):
        msg.explicit_any(context)


def has_explicit_any(t: Type) -> bool:
    """Whether this type, or any type it contains, is an Any coming from an
    explicit type annotation.
    """
    return t.accept(HasExplicitAny())


class HasExplicitAny(TypeQuery[bool]):
    def __init__(self) -> None:
        super().__init__(any)

    def visit_any(self, t: AnyType) -> bool:
        return t.type_of_any == TypeOfAny.explicit

    def visit_typeddict_type(self, t: TypedDictType) -> bool:
        # typeddict is checked during TypedDict declaration, so don't typecheck it here.
        return False


def has_any_from_unimported_type(t: Type) -> bool:
    """Return true if this type is Any because an import was not followed.

    If type t is such an Any type, or has type arguments that contain such an
    Any type, this function will return true.
    """
    return t.accept(HasAnyFromUnimportedType())


class HasAnyFromUnimportedType(BoolTypeQuery):
    def __init__(self) -> None:
        super().__init__(ANY_STRATEGY)

    def visit_any(self, t: AnyType) -> bool:
        return t.type_of_any == TypeOfAny.from_unimported_type

    def visit_typeddict_type(self, t: TypedDictType) -> bool:
        # typeddict is checked during TypedDict declaration, so don't typecheck it here
        return False


def collect_all_inner_types(t: Type) -> list[Type]:
    """Return all types that `t` contains."""
    return t.accept(CollectAllInnerTypesQuery())


class CollectAllInnerTypesQuery(TypeQuery[List[Type]]):
    def __init__(self) -> None:
        super().__init__(self.combine_lists_strategy)

    def query_types(self, types: Iterable[Type]) -> list[Type]:
        return self.strategy([t.accept(self) for t in types]) + list(types)

    @classmethod
    def combine_lists_strategy(cls, it: Iterable[list[Type]]) -> list[Type]:
        return list(itertools.chain.from_iterable(it))


def make_optional_type(t: Type) -> Type:
    """Return the type corresponding to Optional[t].

    Note that we can't use normal union simplification, since this function
    is called during semantic analysis and simplification only works during
    type checking.
    """
    if isinstance(t, ProperType) and isinstance(t, NoneType):
        return t
    elif isinstance(t, ProperType) and isinstance(t, UnionType):
        # Eagerly expanding aliases is not safe during semantic analysis.
        items = [item for item in t.items if not isinstance(get_proper_type(item), NoneType)]
        return UnionType(items + [NoneType()], t.line, t.column)
    else:
        return UnionType([t, NoneType()], t.line, t.column)


def validate_instance(t: Instance, fail: MsgCallback, empty_tuple_index: bool) -> bool:
    """Check if this is a well-formed instance with respect to argument count/positions."""
    # TODO: combine logic with instantiate_type_alias().
    if any(unknown_unpack(a) for a in t.args):
        # This type is not ready to be validated, because of unknown total count.
        # TODO: is it OK to fill with TypeOfAny.from_error instead of special form?
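        # e.g. Unpack[Ts] where Ts is still an unresolved forward reference shows up
        # as Unpack[Any] here, so the total argument count cannot be known yet.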
return False if t.type.has_type_var_tuple_type: min_tv_count = sum( not tv.has_default() and not isinstance(tv, TypeVarTupleType) for tv in t.type.defn.type_vars ) correct = len(t.args) >= min_tv_count if any( isinstance(a, UnpackType) and isinstance(get_proper_type(a.type), Instance) for a in t.args ): correct = True if not t.args: if not (empty_tuple_index and len(t.type.type_vars) == 1): # The Any arguments should be set by the caller. if empty_tuple_index and min_tv_count: fail( f"At least {min_tv_count} type argument(s) expected, none given", t, code=codes.TYPE_ARG, ) return False elif not correct: fail( f"Bad number of arguments, expected: at least {min_tv_count}, given: {len(t.args)}", t, code=codes.TYPE_ARG, ) return False else: # We also need to check if we are not performing a type variable tuple split. unpack = find_unpack_in_list(t.args) if unpack is not None: unpack_arg = t.args[unpack] assert isinstance(unpack_arg, UnpackType) if isinstance(unpack_arg.type, TypeVarTupleType): assert t.type.type_var_tuple_prefix is not None assert t.type.type_var_tuple_suffix is not None exp_prefix = t.type.type_var_tuple_prefix act_prefix = unpack exp_suffix = t.type.type_var_tuple_suffix act_suffix = len(t.args) - unpack - 1 if act_prefix < exp_prefix or act_suffix < exp_suffix: fail("TypeVarTuple cannot be split", t, code=codes.TYPE_ARG) return False elif any(isinstance(a, UnpackType) for a in t.args): # A variadic unpack in fixed size instance (fixed unpacks must be flattened by the caller) fail(message_registry.INVALID_UNPACK_POSITION, t, code=codes.VALID_TYPE) t.args = () return False elif len(t.args) != len(t.type.type_vars): # Invalid number of type parameters. arg_count = len(t.args) min_tv_count = sum(not tv.has_default() for tv in t.type.defn.type_vars) max_tv_count = len(t.type.type_vars) if arg_count and (arg_count < min_tv_count or arg_count > max_tv_count): fail( wrong_type_arg_count(min_tv_count, max_tv_count, str(arg_count), t.type.name), t, code=codes.TYPE_ARG, ) t.invalid = True return False return True def find_self_type(typ: Type, lookup: Callable[[str], SymbolTableNode | None]) -> bool: return typ.accept(HasSelfType(lookup)) class HasSelfType(BoolTypeQuery): def __init__(self, lookup: Callable[[str], SymbolTableNode | None]) -> None: self.lookup = lookup super().__init__(ANY_STRATEGY) def visit_unbound_type(self, t: UnboundType) -> bool: sym = self.lookup(t.name) if sym and sym.fullname in SELF_TYPE_NAMES: return True return super().visit_unbound_type(t) def unknown_unpack(t: Type) -> bool: """Check if a given type is an unpack of an unknown type. Unfortunately, there is no robust way to distinguish forward references from genuine undefined names here. But this worked well so far, although it looks quite fragile. 
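
    For example (illustrative): Unpack[Ts] with a still-unresolved Ts reaches
    this point as an unpack of Any with TypeOfAny.special_form, which is what
    we detect below.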
""" if isinstance(t, UnpackType): unpacked = get_proper_type(t.type) if isinstance(unpacked, AnyType) and unpacked.type_of_any == TypeOfAny.special_form: return True return False class FindTypeVarVisitor(SyntheticTypeVisitor[None]): """Type visitor that looks for type variable types and self types.""" def __init__(self, api: SemanticAnalyzerCoreInterface, scope: TypeVarLikeScope) -> None: self.api = api self.scope = scope self.type_var_likes: list[tuple[str, TypeVarLikeExpr]] = [] self.has_self_type = False self.seen_aliases: set[TypeAliasType] | None = None self.include_callables = True def _seems_like_callable(self, type: UnboundType) -> bool: if not type.args: return False return isinstance(type.args[0], (EllipsisType, TypeList, ParamSpecType)) def visit_unbound_type(self, t: UnboundType) -> None: name = t.name node = None # Special case P.args and P.kwargs for ParamSpecs only. if name.endswith("args"): if name.endswith((".args", ".kwargs")): base = ".".join(name.split(".")[:-1]) n = self.api.lookup_qualified(base, t) if n is not None and isinstance(n.node, ParamSpecExpr): node = n name = base if node is None: node = self.api.lookup_qualified(name, t) if node and node.fullname in SELF_TYPE_NAMES: self.has_self_type = True if ( node and isinstance(node.node, TypeVarLikeExpr) and self.scope.get_binding(node) is None ): if (name, node.node) not in self.type_var_likes: self.type_var_likes.append((name, node.node)) elif not self.include_callables and self._seems_like_callable(t): if find_self_type( t, lambda name: self.api.lookup_qualified(name, t, suppress_errors=True) ): self.has_self_type = True return elif node and node.fullname in LITERAL_TYPE_NAMES: return elif node and node.fullname in ANNOTATED_TYPE_NAMES and t.args: # Don't query the second argument to Annotated for TypeVars self.process_types([t.args[0]]) elif t.args: self.process_types(t.args) def visit_type_list(self, t: TypeList) -> None: self.process_types(t.items) def visit_callable_argument(self, t: CallableArgument) -> None: t.typ.accept(self) def visit_any(self, t: AnyType) -> None: pass def visit_uninhabited_type(self, t: UninhabitedType) -> None: pass def visit_none_type(self, t: NoneType) -> None: pass def visit_erased_type(self, t: ErasedType) -> None: pass def visit_deleted_type(self, t: DeletedType) -> None: pass def visit_type_var(self, t: TypeVarType) -> None: self.process_types([t.upper_bound, t.default] + t.values) def visit_param_spec(self, t: ParamSpecType) -> None: self.process_types([t.upper_bound, t.default]) def visit_type_var_tuple(self, t: TypeVarTupleType) -> None: self.process_types([t.upper_bound, t.default]) def visit_unpack_type(self, t: UnpackType) -> None: self.process_types([t.type]) def visit_parameters(self, t: Parameters) -> None: self.process_types(t.arg_types) def visit_partial_type(self, t: PartialType) -> None: pass def visit_instance(self, t: Instance) -> None: self.process_types(t.args) def visit_callable_type(self, t: CallableType) -> None: # FIX generics self.process_types(t.arg_types) t.ret_type.accept(self) def visit_tuple_type(self, t: TupleType) -> None: self.process_types(t.items) def visit_typeddict_type(self, t: TypedDictType) -> None: self.process_types(list(t.items.values())) def visit_raw_expression_type(self, t: RawExpressionType) -> None: pass def visit_literal_type(self, t: LiteralType) -> None: pass def visit_union_type(self, t: UnionType) -> None: self.process_types(t.items) def visit_overloaded(self, t: Overloaded) -> None: self.process_types(t.items) # type: 
ignore[arg-type] def visit_type_type(self, t: TypeType) -> None: t.item.accept(self) def visit_ellipsis_type(self, t: EllipsisType) -> None: pass def visit_placeholder_type(self, t: PlaceholderType) -> None: return self.process_types(t.args) def visit_type_alias_type(self, t: TypeAliasType) -> None: # Skip type aliases in already visited types to avoid infinite recursion. if self.seen_aliases is None: self.seen_aliases = set() elif t in self.seen_aliases: return self.seen_aliases.add(t) self.process_types(t.args) def process_types(self, types: list[Type] | tuple[Type, ...]) -> None: # Redundant type check helps mypyc. if isinstance(types, list): for t in types: t.accept(self) else: for t in types: t.accept(self) class TypeVarDefaultTranslator(TrivialSyntheticTypeTranslator): """Type translate visitor that replaces UnboundTypes with in-scope TypeVars.""" def __init__( self, api: SemanticAnalyzerInterface, tvar_expr_name: str, context: Context ) -> None: super().__init__() self.api = api self.tvar_expr_name = tvar_expr_name self.context = context def visit_unbound_type(self, t: UnboundType) -> Type: sym = self.api.lookup_qualified(t.name, t, suppress_errors=True) if sym is not None: if type_var := self.api.tvar_scope.get_binding(sym): return type_var if isinstance(sym.node, TypeVarLikeExpr): self.api.fail( f'Type parameter "{self.tvar_expr_name}" has a default type ' "that refers to one or more type variables that are out of scope", self.context, ) return AnyType(TypeOfAny.from_error) return super().visit_unbound_type(t) def visit_type_alias_type(self, t: TypeAliasType) -> Type: # TypeAliasTypes are analyzed separately already, just return it return t
algorandfoundation/puya
src/puyapy/_vendor/mypy/typeanal.py
Python
NOASSERTION
115,252
"""Miscellaneous type operations and helpers for use during type checking. NOTE: These must not be accessed from mypy.nodes or mypy.types to avoid import cycles. These must not be called from the semantic analysis main pass since these may assume that MROs are ready. """ from __future__ import annotations import itertools from typing import Any, Iterable, List, Sequence, TypeVar, cast from mypy.copytype import copy_type from mypy.expandtype import expand_type, expand_type_by_instance from mypy.maptype import map_instance_to_supertype from mypy.nodes import ( ARG_OPT, ARG_POS, ARG_STAR, ARG_STAR2, SYMBOL_FUNCBASE_TYPES, Decorator, Expression, FuncBase, FuncDef, FuncItem, OverloadedFuncDef, StrExpr, TypeInfo, Var, ) from mypy.state import state from mypy.types import ( ENUM_REMOVED_PROPS, AnyType, CallableType, ExtraAttrs, FormalArgument, FunctionLike, Instance, LiteralType, NoneType, NormalizedCallableType, Overloaded, Parameters, ParamSpecType, PartialType, ProperType, TupleType, Type, TypeAliasType, TypedDictType, TypeOfAny, TypeQuery, TypeType, TypeVarLikeType, TypeVarTupleType, TypeVarType, UninhabitedType, UnionType, UnpackType, flatten_nested_unions, get_proper_type, get_proper_types, ) from mypy.typevars import fill_typevars def is_recursive_pair(s: Type, t: Type) -> bool: """Is this a pair of recursive types? There may be more cases, and we may be forced to use e.g. has_recursive_types() here, but this function is called in very hot code, so we try to keep it simple and return True only in cases we know may have problems. """ if isinstance(s, TypeAliasType) and s.is_recursive: return ( isinstance(get_proper_type(t), (Instance, UnionType)) or isinstance(t, TypeAliasType) and t.is_recursive # Tuple types are special, they can cause an infinite recursion even if # the other type is not recursive, because of the tuple fallback that is # calculated "on the fly". or isinstance(get_proper_type(s), TupleType) ) if isinstance(t, TypeAliasType) and t.is_recursive: return ( isinstance(get_proper_type(s), (Instance, UnionType)) or isinstance(s, TypeAliasType) and s.is_recursive # Same as above. or isinstance(get_proper_type(t), TupleType) ) return False def tuple_fallback(typ: TupleType) -> Instance: """Return fallback type for a tuple.""" info = typ.partial_fallback.type if info.fullname != "builtins.tuple": return typ.partial_fallback items = [] for item in typ.items: if isinstance(item, UnpackType): unpacked_type = get_proper_type(item.type) if isinstance(unpacked_type, TypeVarTupleType): unpacked_type = get_proper_type(unpacked_type.upper_bound) if ( isinstance(unpacked_type, Instance) and unpacked_type.type.fullname == "builtins.tuple" ): items.append(unpacked_type.args[0]) else: raise NotImplementedError else: items.append(item) return Instance( info, # Note: flattening recursive unions is dangerous, since it can fool recursive # types optimization in subtypes.py and go into infinite recursion. 
[make_simplified_union(items, handle_recursive=False)], extra_attrs=typ.partial_fallback.extra_attrs, ) def get_self_type(func: CallableType, default_self: Instance | TupleType) -> Type | None: if isinstance(get_proper_type(func.ret_type), UninhabitedType): return func.ret_type elif func.arg_types and func.arg_types[0] != default_self and func.arg_kinds[0] == ARG_POS: return func.arg_types[0] else: return None def type_object_type_from_function( signature: FunctionLike, info: TypeInfo, def_info: TypeInfo, fallback: Instance, is_new: bool ) -> FunctionLike: # We first need to record all non-trivial (explicit) self types in __init__, # since they will not be available after we bind them. Note, we use explicit # self-types only in the defining class, similar to __new__ (but not exactly the same, # see comment in class_callable below). This is mostly useful for annotating library # classes such as subprocess.Popen. default_self = fill_typevars(info) if not is_new and not info.is_newtype: orig_self_types = [get_self_type(it, default_self) for it in signature.items] else: orig_self_types = [None] * len(signature.items) # The __init__ method might come from a generic superclass 'def_info' # with type variables that do not map identically to the type variables of # the class 'info' being constructed. For example: # # class A(Generic[T]): # def __init__(self, x: T) -> None: ... # class B(A[List[T]]): # ... # # We need to map B's __init__ to the type (List[T]) -> None. signature = bind_self( signature, original_type=default_self, is_classmethod=is_new, # Explicit instance self annotations have special handling in class_callable(), # we don't need to bind any type variables in them if they are generic. ignore_instances=True, ) signature = cast(FunctionLike, map_type_from_supertype(signature, info, def_info)) special_sig: str | None = None if def_info.fullname == "builtins.dict": # Special signature! special_sig = "dict" if isinstance(signature, CallableType): return class_callable(signature, info, fallback, special_sig, is_new, orig_self_types[0]) else: # Overloaded __init__/__new__. assert isinstance(signature, Overloaded) items: list[CallableType] = [] for item, orig_self in zip(signature.items, orig_self_types): items.append(class_callable(item, info, fallback, special_sig, is_new, orig_self)) return Overloaded(items) def class_callable( init_type: CallableType, info: TypeInfo, type_type: Instance, special_sig: str | None, is_new: bool, orig_self_type: Type | None = None, ) -> CallableType: """Create a type object type based on the signature of __init__.""" variables: list[TypeVarLikeType] = [] variables.extend(info.defn.type_vars) variables.extend(init_type.variables) from mypy.subtypes import is_subtype init_ret_type = get_proper_type(init_type.ret_type) orig_self_type = get_proper_type(orig_self_type) default_ret_type = fill_typevars(info) explicit_type = init_ret_type if is_new else orig_self_type if ( isinstance(explicit_type, (Instance, TupleType, UninhabitedType)) # We have to skip protocols, because it can be a subtype of a return type # by accident. Like `Hashable` is a subtype of `object`. See #11799 and isinstance(default_ret_type, Instance) and not default_ret_type.type.is_protocol # Only use the declared return type from __new__ or declared self in __init__ # if it is actually returning a subtype of what we would return otherwise. 
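        # For instance (illustrative): "def __new__(cls) -> Special" is honored only
        # when Special is a subtype of the default C[...] instance type checked below.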
and is_subtype(explicit_type, default_ret_type, ignore_type_params=True) ): ret_type: Type = explicit_type else: ret_type = default_ret_type callable_type = init_type.copy_modified( ret_type=ret_type, fallback=type_type, name=None, variables=variables, special_sig=special_sig, ) c = callable_type.with_name(info.name) return c def map_type_from_supertype(typ: Type, sub_info: TypeInfo, super_info: TypeInfo) -> Type: """Map type variables in a type defined in a supertype context to be valid in the subtype context. Assume that the result is unique; if more than one type is possible, return one of the alternatives. For example, assume class D(Generic[S]): ... class C(D[E[T]], Generic[T]): ... Now S in the context of D would be mapped to E[T] in the context of C. """ # Create the type of self in subtype, of form t[a1, ...]. inst_type = fill_typevars(sub_info) if isinstance(inst_type, TupleType): inst_type = tuple_fallback(inst_type) # Map the type of self to supertype. This gets us a description of the # supertype type variables in terms of subtype variables, i.e. t[t1, ...] # so that any type variables in tN are to be interpreted in subtype # context. inst_type = map_instance_to_supertype(inst_type, super_info) # Finally expand the type variables in type with those in the previously # constructed type. Note that both type and inst_type may have type # variables, but in type they are interpreted in supertype context while # in inst_type they are interpreted in subtype context. This works even if # the names of type variables in supertype and subtype overlap. return expand_type_by_instance(typ, inst_type) def supported_self_type( typ: ProperType, allow_callable: bool = True, allow_instances: bool = True ) -> bool: """Is this a supported kind of explicit self-types? Currently, this means an X or Type[X], where X is an instance or a type variable with an instance upper bound. """ if isinstance(typ, TypeType): return supported_self_type(typ.item) if allow_callable and isinstance(typ, CallableType): # Special case: allow class callable instead of Type[...] as cls annotation, # as well as callable self for callback protocols. return True return isinstance(typ, TypeVarType) or ( allow_instances and isinstance(typ, Instance) and typ != fill_typevars(typ.type) ) F = TypeVar("F", bound=FunctionLike) def bind_self( method: F, original_type: Type | None = None, is_classmethod: bool = False, ignore_instances: bool = False, ) -> F: """Return a copy of `method`, with the type of its first parameter (usually self or cls) bound to original_type. If the type of `self` is a generic type (T, or Type[T] for classmethods), instantiate every occurrence of type with original_type in the rest of the signature and in the return type. original_type is the type of E in the expression E.copy(). It is None in compatibility checks. In this case we treat it as the erasure of the declared type of self. This way we can express "the type of self". For example: T = TypeVar('T', bound='A') class A: def copy(self: T) -> T: ... class B(A): pass b = B().copy() # type: B """ if isinstance(method, Overloaded): items = [] original_type = get_proper_type(original_type) for c in method.items: if isinstance(original_type, Instance): # Filter based on whether declared self type can match actual object type. # For example, if self has type C[int] and method is accessed on a C[str] value, # omit this item. This is best effort since bind_self can be called in many # contexts, and doing complete validation might trigger infinite recursion. 
# # Note that overload item filtering normally happens elsewhere. This is needed # at least during constraint inference. keep = is_valid_self_type_best_effort(c, original_type) else: keep = True if keep: items.append(bind_self(c, original_type, is_classmethod, ignore_instances)) if len(items) == 0: # If no item matches, returning all items helps avoid some spurious errors items = [ bind_self(c, original_type, is_classmethod, ignore_instances) for c in method.items ] return cast(F, Overloaded(items)) assert isinstance(method, CallableType) func = method if not func.arg_types: # Invalid method, return something. return cast(F, func) if func.arg_kinds[0] in (ARG_STAR, ARG_STAR2): # The signature is of the form 'def foo(*args, ...)'. # In this case we shouldn't drop the first arg, # since func will be absorbed by the *args. # TODO: infer bounds on the type of *args? # In the case of **kwargs we should probably emit an error, but # for now we simply skip it, to avoid crashes down the line. return cast(F, func) self_param_type = get_proper_type(func.arg_types[0]) variables: Sequence[TypeVarLikeType] # Having a def __call__(self: Callable[...], ...) can cause infinite recursion. Although # this special-casing looks not very principled, there is nothing meaningful we can infer # from such definition, since it is inherently indefinitely recursive. allow_callable = func.name is None or not func.name.startswith("__call__ of") if func.variables and supported_self_type( self_param_type, allow_callable=allow_callable, allow_instances=not ignore_instances ): from mypy.infer import infer_type_arguments if original_type is None: # TODO: type check method override (see #7861). original_type = erase_to_bound(self_param_type) original_type = get_proper_type(original_type) # Find which of method type variables appear in the type of "self". self_ids = {tv.id for tv in get_all_type_vars(self_param_type)} self_vars = [tv for tv in func.variables if tv.id in self_ids] # Solve for these type arguments using the actual class or instance type. typeargs = infer_type_arguments( self_vars, self_param_type, original_type, is_supertype=True ) if ( is_classmethod and any(isinstance(get_proper_type(t), UninhabitedType) for t in typeargs) and isinstance(original_type, (Instance, TypeVarType, TupleType)) ): # In case we call a classmethod through an instance x, fallback to type(x). typeargs = infer_type_arguments( self_vars, self_param_type, TypeType(original_type), is_supertype=True ) # Update the method signature with the solutions found. # Technically, some constraints might be unsolvable, make them Never. to_apply = [t if t is not None else UninhabitedType() for t in typeargs] func = expand_type(func, {tv.id: arg for tv, arg in zip(self_vars, to_apply)}) variables = [v for v in func.variables if v not in self_vars] else: variables = func.variables original_type = get_proper_type(original_type) if isinstance(original_type, CallableType) and original_type.is_type_obj(): original_type = TypeType.make_normalized(original_type.ret_type) res = func.copy_modified( arg_types=func.arg_types[1:], arg_kinds=func.arg_kinds[1:], arg_names=func.arg_names[1:], variables=variables, bound_args=[original_type], ) return cast(F, res) def is_valid_self_type_best_effort(c: CallableType, self_type: Instance) -> bool: """Quickly check if self_type might match the self in a callable. Avoid performing any complex type operations. This is performance-critical. Default to returning True if we don't know (or it would be too expensive). 
""" if ( self_type.args and c.arg_types and isinstance((arg_type := get_proper_type(c.arg_types[0])), Instance) and c.arg_kinds[0] in (ARG_POS, ARG_OPT) and arg_type.args and self_type.type.fullname != "functools._SingleDispatchCallable" ): if self_type.type is not arg_type.type: # We can't map to supertype, since it could trigger expensive checks for # protocol types, so we consevatively assume this is fine. return True # Fast path: no explicit annotation on self if all( ( type(arg) is TypeVarType and type(arg.upper_bound) is Instance and arg.upper_bound.type.fullname == "builtins.object" ) for arg in arg_type.args ): return True from mypy.meet import is_overlapping_types return is_overlapping_types(self_type, c.arg_types[0]) return True def erase_to_bound(t: Type) -> Type: # TODO: use value restrictions to produce a union? t = get_proper_type(t) if isinstance(t, TypeVarType): return t.upper_bound if isinstance(t, TypeType): if isinstance(t.item, TypeVarType): return TypeType.make_normalized(t.item.upper_bound) return t def callable_corresponding_argument( typ: NormalizedCallableType | Parameters, model: FormalArgument ) -> FormalArgument | None: """Return the argument a function that corresponds to `model`""" by_name = typ.argument_by_name(model.name) by_pos = typ.argument_by_position(model.pos) if by_name is None and by_pos is None: return None if by_name is not None and by_pos is not None: if by_name == by_pos: return by_name # If we're dealing with an optional pos-only and an optional # name-only arg, merge them. This is the case for all functions # taking both *args and **args, or a pair of functions like so: # def right(a: int = ...) -> None: ... # def left(__a: int = ..., *, a: int = ...) -> None: ... from mypy.subtypes import is_equivalent if ( not (by_name.required or by_pos.required) and by_pos.name is None and by_name.pos is None and is_equivalent(by_name.typ, by_pos.typ) ): return FormalArgument(by_name.name, by_pos.pos, by_name.typ, False) return by_name if by_name is not None else by_pos def simple_literal_type(t: ProperType | None) -> Instance | None: """Extract the underlying fallback Instance type for a simple Literal""" if isinstance(t, Instance) and t.last_known_value is not None: t = t.last_known_value if isinstance(t, LiteralType): return t.fallback return None def is_simple_literal(t: ProperType) -> bool: if isinstance(t, LiteralType): return t.fallback.type.is_enum or t.fallback.type.fullname == "builtins.str" if isinstance(t, Instance): return t.last_known_value is not None and isinstance(t.last_known_value.value, str) return False def make_simplified_union( items: Sequence[Type], line: int = -1, column: int = -1, *, keep_erased: bool = False, contract_literals: bool = True, handle_recursive: bool = True, ) -> ProperType: """Build union type with redundant union items removed. If only a single item remains, this may return a non-union type. Examples: * [int, str] -> Union[int, str] * [int, object] -> object * [int, int] -> int * [int, Any] -> Union[int, Any] (Any types are not simplified away!) * [Any, Any] -> Any * [int, Union[bytes, str]] -> Union[int, bytes, str] Note: This must NOT be used during semantic analysis, since TypeInfos may not be fully initialized. The keep_erased flag is used for type inference against union types containing type variables. If set to True, keep all ErasedType items. The contract_literals flag indicates whether we need to contract literal types back into a sum type. Set it to False when called by try_expanding_sum_type_ to_union(). 
""" # Step 1: expand all nested unions items = flatten_nested_unions(items, handle_recursive=handle_recursive) # Step 2: fast path for single item if len(items) == 1: return get_proper_type(items[0]) # Step 3: remove redundant unions simplified_set: Sequence[Type] = _remove_redundant_union_items(items, keep_erased) # Step 4: If more than one literal exists in the union, try to simplify if ( contract_literals and sum(isinstance(get_proper_type(item), LiteralType) for item in simplified_set) > 1 ): simplified_set = try_contracting_literals_in_union(simplified_set) result = get_proper_type(UnionType.make_union(simplified_set, line, column)) nitems = len(items) if nitems > 1 and ( nitems > 2 or not (type(items[0]) is NoneType or type(items[1]) is NoneType) ): # Step 5: At last, we erase any (inconsistent) extra attributes on instances. # Initialize with None instead of an empty set as a micro-optimization. The set # is needed very rarely, so we try to avoid constructing it. extra_attrs_set: set[ExtraAttrs] | None = None for item in items: instance = try_getting_instance_fallback(item) if instance and instance.extra_attrs: if extra_attrs_set is None: extra_attrs_set = {instance.extra_attrs} else: extra_attrs_set.add(instance.extra_attrs) if extra_attrs_set is not None and len(extra_attrs_set) > 1: fallback = try_getting_instance_fallback(result) if fallback: fallback.extra_attrs = None return result def _remove_redundant_union_items(items: list[Type], keep_erased: bool) -> list[Type]: from mypy.subtypes import is_proper_subtype # The first pass through this loop, we check if later items are subtypes of earlier items. # The second pass through this loop, we check if earlier items are subtypes of later items # (by reversing the remaining items) for _direction in range(2): new_items: list[Type] = [] # seen is a map from a type to its index in new_items seen: dict[ProperType, int] = {} unduplicated_literal_fallbacks: set[Instance] | None = None for ti in items: proper_ti = get_proper_type(ti) # UninhabitedType is always redundant if isinstance(proper_ti, UninhabitedType): continue duplicate_index = -1 # Quickly check if we've seen this type if proper_ti in seen: duplicate_index = seen[proper_ti] elif ( isinstance(proper_ti, LiteralType) and unduplicated_literal_fallbacks is not None and proper_ti.fallback in unduplicated_literal_fallbacks ): # This is an optimisation for unions with many LiteralType # We've already checked for exact duplicates. This means that any super type of # the LiteralType must be a super type of its fallback. 
If we've gone through # the expensive loop below and found no super type for a previous LiteralType # with the same fallback, we can skip doing that work again and just add the type # to new_items pass else: # If not, check if we've seen a supertype of this type for j, tj in enumerate(new_items): tj = get_proper_type(tj) # If tj is an Instance with a last_known_value, do not remove proper_ti # (unless it's an instance with the same last_known_value) if ( isinstance(tj, Instance) and tj.last_known_value is not None and not ( isinstance(proper_ti, Instance) and tj.last_known_value == proper_ti.last_known_value ) ): continue if is_proper_subtype( ti, tj, keep_erased_types=keep_erased, ignore_promotions=True ): duplicate_index = j break if duplicate_index != -1: # If deleted subtypes had more general truthiness, use that orig_item = new_items[duplicate_index] if not orig_item.can_be_true and ti.can_be_true: new_items[duplicate_index] = true_or_false(orig_item) elif not orig_item.can_be_false and ti.can_be_false: new_items[duplicate_index] = true_or_false(orig_item) else: # We have a non-duplicate item, add it to new_items seen[proper_ti] = len(new_items) new_items.append(ti) if isinstance(proper_ti, LiteralType): if unduplicated_literal_fallbacks is None: unduplicated_literal_fallbacks = set() unduplicated_literal_fallbacks.add(proper_ti.fallback) items = new_items if len(items) <= 1: break items.reverse() return items def _get_type_method_ret_type(t: Type, *, name: str) -> Type | None: t = get_proper_type(t) if isinstance(t, Instance): sym = t.type.get(name) if sym: sym_type = get_proper_type(sym.type) if isinstance(sym_type, CallableType): return sym_type.ret_type return None def true_only(t: Type) -> ProperType: """ Restricted version of t with only True-ish values """ t = get_proper_type(t) if not t.can_be_true: # All values of t are False-ish, so there are no true values in it return UninhabitedType(line=t.line, column=t.column) elif not t.can_be_false: # All values of t are already True-ish, so true_only is idempotent in this case return t elif isinstance(t, UnionType): # The true version of a union type is the union of the true versions of its components new_items = [true_only(item) for item in t.items] can_be_true_items = [item for item in new_items if item.can_be_true] return make_simplified_union(can_be_true_items, line=t.line, column=t.column) else: ret_type = _get_type_method_ret_type(t, name="__bool__") or _get_type_method_ret_type( t, name="__len__" ) if ret_type and not ret_type.can_be_true: return UninhabitedType(line=t.line, column=t.column) new_t = copy_type(t) new_t.can_be_false = False return new_t def false_only(t: Type) -> ProperType: """ Restricted version of t with only False-ish values """ t = get_proper_type(t) if not t.can_be_false: if state.strict_optional: # All values of t are True-ish, so there are no false values in it return UninhabitedType(line=t.line) else: # When strict optional checking is disabled, everything can be # False-ish since anything can be None return NoneType(line=t.line) elif not t.can_be_true: # All values of t are already False-ish, so false_only is idempotent in this case return t elif isinstance(t, UnionType): # The false version of a union type is the union of the false versions of its components new_items = [false_only(item) for item in t.items] can_be_false_items = [item for item in new_items if item.can_be_false] return make_simplified_union(can_be_false_items, line=t.line, column=t.column) elif isinstance(t, Instance) and 
t.type.fullname in ("builtins.str", "builtins.bytes"): return LiteralType("", fallback=t) elif isinstance(t, Instance) and t.type.fullname == "builtins.int": return LiteralType(0, fallback=t) else: ret_type = _get_type_method_ret_type(t, name="__bool__") or _get_type_method_ret_type( t, name="__len__" ) if ret_type: if not ret_type.can_be_false: return UninhabitedType(line=t.line) elif isinstance(t, Instance) and t.type.is_final: return UninhabitedType(line=t.line) new_t = copy_type(t) new_t.can_be_true = False return new_t def true_or_false(t: Type) -> ProperType: """ Unrestricted version of t with both True-ish and False-ish values """ t = get_proper_type(t) if isinstance(t, UnionType): new_items = [true_or_false(item) for item in t.items] return make_simplified_union(new_items, line=t.line, column=t.column) new_t = copy_type(t) new_t.can_be_true = new_t.can_be_true_default() new_t.can_be_false = new_t.can_be_false_default() return new_t def erase_def_to_union_or_bound(tdef: TypeVarLikeType) -> Type: # TODO(PEP612): fix for ParamSpecType if isinstance(tdef, ParamSpecType): return AnyType(TypeOfAny.from_error) if isinstance(tdef, TypeVarType) and tdef.values: return make_simplified_union(tdef.values) else: return tdef.upper_bound def erase_to_union_or_bound(typ: TypeVarType) -> ProperType: if typ.values: return make_simplified_union(typ.values) else: return get_proper_type(typ.upper_bound) def function_type(func: FuncBase, fallback: Instance) -> FunctionLike: if func.type: assert isinstance(func.type, FunctionLike) return func.type else: # Implicit type signature with dynamic types. if isinstance(func, FuncItem): return callable_type(func, fallback) else: # Broken overloads can have self.type set to None. # TODO: should we instead always set the type in semantic analyzer? assert isinstance(func, OverloadedFuncDef) any_type = AnyType(TypeOfAny.from_error) dummy = CallableType( [any_type, any_type], [ARG_STAR, ARG_STAR2], [None, None], any_type, fallback, line=func.line, is_ellipsis_args=True, ) # Return an Overloaded, because some callers may expect that # an OverloadedFuncDef has an Overloaded type. return Overloaded([dummy]) def callable_type( fdef: FuncItem, fallback: Instance, ret_type: Type | None = None ) -> CallableType: # TODO: somewhat unfortunate duplication with prepare_method_signature in semanal if fdef.info and (not fdef.is_static or fdef.name == "__new__") and fdef.arg_names: self_type: Type = fill_typevars(fdef.info) if fdef.is_class or fdef.name == "__new__": self_type = TypeType.make_normalized(self_type) args = [self_type] + [AnyType(TypeOfAny.unannotated)] * (len(fdef.arg_names) - 1) else: args = [AnyType(TypeOfAny.unannotated)] * len(fdef.arg_names) return CallableType( args, fdef.arg_kinds, fdef.arg_names, ret_type or AnyType(TypeOfAny.unannotated), fallback, name=fdef.name, line=fdef.line, column=fdef.column, implicit=True, # We need this for better error messages, like missing `self` note: definition=fdef if isinstance(fdef, FuncDef) else None, ) def try_getting_str_literals(expr: Expression, typ: Type) -> list[str] | None: """If the given expression or type corresponds to a string literal or a union of string literals, returns a list of the underlying strings. Otherwise, returns None. Specifically, this function is guaranteed to return a list with one or more strings if one of the following is true: 1. 'expr' is a StrExpr 2. 'typ' is a LiteralType containing a string 3. 
'typ' is a UnionType containing only LiteralType of strings """ if isinstance(expr, StrExpr): return [expr.value] # TODO: See if we can eliminate this function and call the below one directly return try_getting_str_literals_from_type(typ) def try_getting_str_literals_from_type(typ: Type) -> list[str] | None: """If the given expression or type corresponds to a string Literal or a union of string Literals, returns a list of the underlying strings. Otherwise, returns None. For example, if we had the type 'Literal["foo", "bar"]' as input, this function would return a list of strings ["foo", "bar"]. """ return try_getting_literals_from_type(typ, str, "builtins.str") def try_getting_int_literals_from_type(typ: Type) -> list[int] | None: """If the given expression or type corresponds to an int Literal or a union of int Literals, returns a list of the underlying ints. Otherwise, returns None. For example, if we had the type 'Literal[1, 2, 3]' as input, this function would return a list of ints [1, 2, 3]. """ return try_getting_literals_from_type(typ, int, "builtins.int") T = TypeVar("T") def try_getting_literals_from_type( typ: Type, target_literal_type: type[T], target_fullname: str ) -> list[T] | None: """If the given expression or type corresponds to a Literal or union of Literals where the underlying values correspond to the given target type, returns a list of those underlying values. Otherwise, returns None. """ typ = get_proper_type(typ) if isinstance(typ, Instance) and typ.last_known_value is not None: possible_literals: list[Type] = [typ.last_known_value] elif isinstance(typ, UnionType): possible_literals = list(typ.items) else: possible_literals = [typ] literals: list[T] = [] for lit in get_proper_types(possible_literals): if isinstance(lit, LiteralType) and lit.fallback.type.fullname == target_fullname: val = lit.value if isinstance(val, target_literal_type): literals.append(val) else: return None else: return None return literals def is_literal_type_like(t: Type | None) -> bool: """Returns 'true' if the given type context is potentially either a LiteralType, a Union of LiteralType, or something similar. """ t = get_proper_type(t) if t is None: return False elif isinstance(t, LiteralType): return True elif isinstance(t, UnionType): return any(is_literal_type_like(item) for item in t.items) elif isinstance(t, TypeVarType): return is_literal_type_like(t.upper_bound) or any( is_literal_type_like(item) for item in t.values ) else: return False def is_singleton_type(typ: Type) -> bool: """Returns 'true' if this type is a "singleton type" -- if there exists exactly only one runtime value associated with this type. That is, given two values 'a' and 'b' that have the same type 't', 'is_singleton_type(t)' returns True if and only if the expression 'a is b' is always true. Currently, this returns True when given NoneTypes, enum LiteralTypes, enum types with a single value and ... (Ellipses). Note that other kinds of LiteralTypes cannot count as singleton types. For example, suppose we do 'a = 100000 + 1' and 'b = 100001'. It is not guaranteed that 'a is b' will always be true -- some implementations of Python will end up constructing two distinct instances of 100001. """ typ = get_proper_type(typ) return typ.is_singleton_type() def try_expanding_sum_type_to_union(typ: Type, target_fullname: str) -> ProperType: """Attempts to recursively expand any enum Instances with the given target_fullname into a Union of all of its component LiteralTypes. 
For example, if we have: class Color(Enum): RED = 1 BLUE = 2 YELLOW = 3 class Status(Enum): SUCCESS = 1 FAILURE = 2 UNKNOWN = 3 ...and if we call `try_expanding_sum_type_to_union(Union[Color, Status], 'module.Color')`, this function will return Literal[Color.RED, Color.BLUE, Color.YELLOW, Status]. """ typ = get_proper_type(typ) if isinstance(typ, UnionType): items = [ try_expanding_sum_type_to_union(item, target_fullname) for item in typ.relevant_items() ] return make_simplified_union(items, contract_literals=False) elif isinstance(typ, Instance) and typ.type.fullname == target_fullname: if typ.type.is_enum: new_items = [] for name, symbol in typ.type.names.items(): if not isinstance(symbol.node, Var): continue # Skip these since Enum will remove them if name in ENUM_REMOVED_PROPS: continue # Skip private attributes if name.startswith("__"): continue new_items.append(LiteralType(name, typ)) return make_simplified_union(new_items, contract_literals=False) elif typ.type.fullname == "builtins.bool": return make_simplified_union( [LiteralType(True, typ), LiteralType(False, typ)], contract_literals=False ) return typ def try_contracting_literals_in_union(types: Sequence[Type]) -> list[ProperType]: """Contracts any literal types back into a sum type if possible. Will replace the first instance of the literal with the sum type and remove all others. If we call `try_contracting_literals_in_union(Literal[Color.RED, Color.BLUE, Color.YELLOW])`, this function will return Color. We also treat `Literal[True, False]` as `bool`. """ proper_types = [get_proper_type(typ) for typ in types] sum_types: dict[str, tuple[set[Any], list[int]]] = {} marked_for_deletion = set() for idx, typ in enumerate(proper_types): if isinstance(typ, LiteralType): fullname = typ.fallback.type.fullname if typ.fallback.type.is_enum or isinstance(typ.value, bool): if fullname not in sum_types: sum_types[fullname] = ( ( set(typ.fallback.get_enum_values()) if typ.fallback.type.is_enum else {True, False} ), [], ) literals, indexes = sum_types[fullname] literals.discard(typ.value) indexes.append(idx) if not literals: first, *rest = indexes proper_types[first] = typ.fallback marked_for_deletion |= set(rest) return list( itertools.compress( proper_types, [(i not in marked_for_deletion) for i in range(len(proper_types))] ) ) def coerce_to_literal(typ: Type) -> Type: """Recursively converts any Instances that have a last_known_value or are instances of enum types with a single value into the corresponding LiteralType. """ original_type = typ typ = get_proper_type(typ) if isinstance(typ, UnionType): new_items = [coerce_to_literal(item) for item in typ.items] return UnionType.make_union(new_items) elif isinstance(typ, Instance): if typ.last_known_value: return typ.last_known_value elif typ.type.is_enum: enum_values = typ.get_enum_values() if len(enum_values) == 1: return LiteralType(value=enum_values[0], fallback=typ) return original_type def get_type_vars(tp: Type) -> list[TypeVarType]: return cast("list[TypeVarType]", tp.accept(TypeVarExtractor())) def get_all_type_vars(tp: Type) -> list[TypeVarLikeType]: # TODO: should we always use this function instead of get_type_vars() above?
return tp.accept(TypeVarExtractor(include_all=True)) class TypeVarExtractor(TypeQuery[List[TypeVarLikeType]]): def __init__(self, include_all: bool = False) -> None: super().__init__(self._merge) self.include_all = include_all def _merge(self, iter: Iterable[list[TypeVarLikeType]]) -> list[TypeVarLikeType]: out = [] for item in iter: out.extend(item) return out def visit_type_var(self, t: TypeVarType) -> list[TypeVarLikeType]: return [t] def visit_param_spec(self, t: ParamSpecType) -> list[TypeVarLikeType]: return [t] if self.include_all else [] def visit_type_var_tuple(self, t: TypeVarTupleType) -> list[TypeVarLikeType]: return [t] if self.include_all else [] def custom_special_method(typ: Type, name: str, check_all: bool = False) -> bool: """Does this type have a custom special method such as __format__() or __eq__()? If check_all is True ensure all items of a union have a custom method, not just some. """ typ = get_proper_type(typ) if isinstance(typ, Instance): method = typ.type.get(name) if method and isinstance(method.node, (SYMBOL_FUNCBASE_TYPES, Decorator, Var)): if method.node.info: return not method.node.info.fullname.startswith(("builtins.", "typing.")) return False if isinstance(typ, UnionType): if check_all: return all(custom_special_method(t, name, check_all) for t in typ.items) return any(custom_special_method(t, name) for t in typ.items) if isinstance(typ, TupleType): return custom_special_method(tuple_fallback(typ), name, check_all) if isinstance(typ, FunctionLike) and typ.is_type_obj(): # Look up __method__ on the metaclass for class objects. return custom_special_method(typ.fallback, name, check_all) if isinstance(typ, AnyType): # Avoid false positives in uncertain cases. return True # TODO: support other types (see ExpressionChecker.has_member())? return False def separate_union_literals(t: UnionType) -> tuple[Sequence[LiteralType], Sequence[Type]]: """Separate literals from other members in a union type.""" literal_items = [] union_items = [] for item in t.items: proper = get_proper_type(item) if isinstance(proper, LiteralType): literal_items.append(proper) else: union_items.append(item) return literal_items, union_items def try_getting_instance_fallback(typ: Type) -> Instance | None: """Returns the Instance fallback for this type if one exists or None.""" typ = get_proper_type(typ) if isinstance(typ, Instance): return typ elif isinstance(typ, LiteralType): return typ.fallback elif isinstance(typ, NoneType): return None # Fast path for None, which is common elif isinstance(typ, FunctionLike): return typ.fallback elif isinstance(typ, TupleType): return typ.partial_fallback elif isinstance(typ, TypedDictType): return typ.fallback elif isinstance(typ, TypeVarType): return try_getting_instance_fallback(typ.upper_bound) return None def fixup_partial_type(typ: Type) -> Type: """Convert a partial type that we couldn't resolve into something concrete. This means, for None we make it Optional[Any], and for anything else we fill in all of the type arguments with Any. """ if not isinstance(typ, PartialType): return typ if typ.type is None: return UnionType.make_union([AnyType(TypeOfAny.unannotated), NoneType()]) else: return Instance(typ.type, [AnyType(TypeOfAny.unannotated)] * len(typ.type.type_vars)) def get_protocol_member(left: Instance, member: str, class_obj: bool) -> ProperType | None: if member == "__call__" and class_obj: # Special case: class objects always have __call__ that is just the constructor. 
from mypy.checkmember import type_object_type def named_type(fullname: str) -> Instance: return Instance(left.type.mro[-1], []) return type_object_type(left.type, named_type) if member == "__call__" and left.type.is_metaclass(): # Special case: we want to avoid falling back to metaclass __call__ # if constructor signature didn't match, this can cause many false negatives. return None from mypy.subtypes import find_member return get_proper_type(find_member(member, left, left, class_obj=class_obj))
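# ---------------------------------------------------------------------------
# Editor's illustrative sketch (not part of upstream mypy): a small, hedged
# demonstration of the make_simplified_union() semantics documented above. It
# uses only types that need no TypeInfo (NoneType, AnyType, UninhabitedType),
# all already imported at the top of this module, so it runs as-is within it.
# The helper name _demo_make_simplified_union is hypothetical.
def _demo_make_simplified_union() -> None:
    # Proper-subtype checks differ without strict optional (None becomes a
    # subtype of everything), so pin it on, as the type checker itself would.
    with state.strict_optional_set(True):
        none = NoneType()
        never = UninhabitedType()
        any_t = AnyType(TypeOfAny.explicit)
        # Exact duplicates collapse to a single, non-union result.
        assert isinstance(make_simplified_union([none, none]), NoneType)
        # Uninhabited (Never) items are always dropped as redundant.
        assert isinstance(make_simplified_union([never, none]), NoneType)
        # Any is never simplified away, so the union survives.
        assert isinstance(make_simplified_union([none, any_t]), UnionType)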
algorandfoundation/puya
src/puyapy/_vendor/mypy/typeops.py
Python
NOASSERTION
44,620
"""Classes for representing mypy types.""" from __future__ import annotations import sys from abc import abstractmethod from typing import ( TYPE_CHECKING, Any, ClassVar, Dict, Final, Iterable, NamedTuple, NewType, Sequence, TypeVar, Union, cast, ) from typing_extensions import Self, TypeAlias as _TypeAlias, TypeGuard, overload import mypy.nodes from mypy.bogus_type import Bogus from mypy.nodes import ( ARG_POS, ARG_STAR, ARG_STAR2, INVARIANT, ArgKind, FakeInfo, FuncDef, SymbolNode, ) from mypy.options import Options from mypy.state import state from mypy.util import IdMapper T = TypeVar("T") JsonDict: _TypeAlias = Dict[str, Any] # The set of all valid expressions that can currently be contained # inside of a Literal[...]. # # Literals can contain bytes and enum-values: we special-case both of these # and store the value as a string. We rely on the fallback type that's also # stored with the Literal to determine how a string is being used. # # TODO: confirm that we're happy with representing enums (and the # other types) in the manner described above. # # Note: if we change the set of types included below, we must also # make sure to audit the following methods: # # 1. types.LiteralType's serialize and deserialize methods: this method # needs to make sure it can convert the below types into JSON and back. # # 2. types.LiteralType's 'value_repr` method: this method is ultimately used # by TypeStrVisitor's visit_literal_type to generate a reasonable # repr-able output. # # 3. server.astdiff.SnapshotTypeVisitor's visit_literal_type_method: this # method assumes that the following types supports equality checks and # hashability. # # Note: Although "Literal[None]" is a valid type, we internally always convert # such a type directly into "None". So, "None" is not a valid parameter of # LiteralType and is omitted from this list. # # Note: Float values are only used internally. They are not accepted within # Literal[...]. LiteralValue: _TypeAlias = Union[int, str, bool, float] # If we only import type_visitor in the middle of the file, mypy # breaks, and if we do it at the top, it breaks at runtime because of # import cycle issues, so we do it at the top while typechecking and # then again in the middle at runtime. # We should be able to remove this once we are switched to the new # semantic analyzer! if TYPE_CHECKING: from mypy.type_visitor import ( SyntheticTypeVisitor as SyntheticTypeVisitor, TypeVisitor as TypeVisitor, ) TYPE_VAR_LIKE_NAMES: Final = ( "typing.TypeVar", "typing_extensions.TypeVar", "typing.ParamSpec", "typing_extensions.ParamSpec", "typing.TypeVarTuple", "typing_extensions.TypeVarTuple", ) TYPED_NAMEDTUPLE_NAMES: Final = ("typing.NamedTuple", "typing_extensions.NamedTuple") # Supported names of TypedDict type constructors. TPDICT_NAMES: Final = ( "typing.TypedDict", "typing_extensions.TypedDict", "mypy_extensions.TypedDict", ) # Supported fallback instance type names for TypedDict types. TPDICT_FB_NAMES: Final = ( "typing._TypedDict", "typing_extensions._TypedDict", "mypy_extensions._TypedDict", ) # Supported names of Protocol base class. PROTOCOL_NAMES: Final = ("typing.Protocol", "typing_extensions.Protocol") # Supported TypeAlias names. TYPE_ALIAS_NAMES: Final = ("typing.TypeAlias", "typing_extensions.TypeAlias") # Supported Final type names. FINAL_TYPE_NAMES: Final = ("typing.Final", "typing_extensions.Final") # Supported @final decorator names. FINAL_DECORATOR_NAMES: Final = ("typing.final", "typing_extensions.final") # Supported @type_check_only names. 
TYPE_CHECK_ONLY_NAMES: Final = ("typing.type_check_only", "typing_extensions.type_check_only") # Supported Literal type names. LITERAL_TYPE_NAMES: Final = ("typing.Literal", "typing_extensions.Literal") # Supported Annotated type names. ANNOTATED_TYPE_NAMES: Final = ("typing.Annotated", "typing_extensions.Annotated") # Supported @deprecated type names DEPRECATED_TYPE_NAMES: Final = ("warnings.deprecated", "typing_extensions.deprecated") # We use this constant in various places when checking `tuple` subtyping: TUPLE_LIKE_INSTANCE_NAMES: Final = ( "builtins.tuple", "typing.Iterable", "typing.Container", "typing.Sequence", "typing.Reversible", ) IMPORTED_REVEAL_TYPE_NAMES: Final = ("typing.reveal_type", "typing_extensions.reveal_type") REVEAL_TYPE_NAMES: Final = ("builtins.reveal_type", *IMPORTED_REVEAL_TYPE_NAMES) ASSERT_TYPE_NAMES: Final = ("typing.assert_type", "typing_extensions.assert_type") OVERLOAD_NAMES: Final = ("typing.overload", "typing_extensions.overload") # Attributes that can optionally be defined in the body of a subclass of # enum.Enum but are removed from the class __dict__ by EnumMeta. ENUM_REMOVED_PROPS: Final = ("_ignore_", "_order_", "__order__") NEVER_NAMES: Final = ( "typing.NoReturn", "typing_extensions.NoReturn", "mypy_extensions.NoReturn", "typing.Never", "typing_extensions.Never", ) # Mypyc fixed-width native int types (compatible with builtins.int) MYPYC_NATIVE_INT_NAMES: Final = ( "mypy_extensions.i64", "mypy_extensions.i32", "mypy_extensions.i16", "mypy_extensions.u8", ) DATACLASS_TRANSFORM_NAMES: Final = ( "typing.dataclass_transform", "typing_extensions.dataclass_transform", ) # Supported @override decorator names. OVERRIDE_DECORATOR_NAMES: Final = ("typing.override", "typing_extensions.override") # A placeholder used for Bogus[...] parameters _dummy: Final[Any] = object() # A placeholder for int parameters _dummy_int: Final = -999999 class TypeOfAny: """ This class describes different types of Any. Each 'Any' can be of only one type at a time. """ __slots__ = () # Was this Any type inferred without a type annotation? unannotated: Final = 1 # Does this Any come from an explicit type annotation? explicit: Final = 2 # Does this come from an unfollowed import? See --disallow-any-unimported option from_unimported_type: Final = 3 # Does this Any type come from omitted generics? from_omitted_generics: Final = 4 # Does this Any come from an error? from_error: Final = 5 # Is this a type that can't be represented in mypy's type system? For instance, type of # call to NewType(...). Even though these types aren't real Anys, we treat them as such. # Also used for variables named '_'. special_form: Final = 6 # Does this Any come from interaction with another Any? from_another_any: Final = 7 # Does this Any come from an implementation limitation/bug? implementation_artifact: Final = 8 # Does this Any come from use in the suggestion engine? This is # used to ignore Anys inserted by the suggestion engine when # generating constraints. suggestion_engine: Final = 9 def deserialize_type(data: JsonDict | str) -> Type: if isinstance(data, str): return Instance.deserialize(data) classname = data[".class"] method = deserialize_map.get(classname) if method is not None: return method(data) raise NotImplementedError(f"unexpected .class {classname}") class Type(mypy.nodes.Context): """Abstract base class for all types.""" __slots__ = ("_can_be_true", "_can_be_false") # 'can_be_true' and 'can_be_false' mean whether the value of the # expression can be true or false in a boolean context. 
They are useful # when inferring the type of logic expressions like `x and y`. # # For example: # * the literal `False` can't be true while `True` can. # * a value with type `bool` can be true or false. # * `None` can't be true # * ... def __init__(self, line: int = -1, column: int = -1) -> None: super().__init__(line, column) # Value of these can be -1 (use the default, lazy init), 0 (false) or 1 (true) self._can_be_true = -1 self._can_be_false = -1 @property def can_be_true(self) -> bool: if self._can_be_true == -1: # Lazy init helps mypyc self._can_be_true = self.can_be_true_default() return bool(self._can_be_true) @can_be_true.setter def can_be_true(self, v: bool) -> None: self._can_be_true = v @property def can_be_false(self) -> bool: if self._can_be_false == -1: # Lazy init helps mypyc self._can_be_false = self.can_be_false_default() return bool(self._can_be_false) @can_be_false.setter def can_be_false(self, v: bool) -> None: self._can_be_false = v def can_be_true_default(self) -> bool: return True def can_be_false_default(self) -> bool: return True def accept(self, visitor: TypeVisitor[T]) -> T: raise RuntimeError("Not implemented", type(self)) def __repr__(self) -> str: return self.accept(TypeStrVisitor(options=Options())) def str_with_options(self, options: Options) -> str: return self.accept(TypeStrVisitor(options=options)) def serialize(self) -> JsonDict | str: raise NotImplementedError(f"Cannot serialize {self.__class__.__name__} instance") @classmethod def deserialize(cls, data: JsonDict) -> Type: raise NotImplementedError(f"Cannot deserialize {cls.__name__} instance") def is_singleton_type(self) -> bool: return False class TypeAliasType(Type): """A type alias to another type. To support recursive type aliases we don't immediately expand a type alias during semantic analysis, but create an instance of this type that records the target alias definition node (mypy.nodes.TypeAlias) and type arguments (for generic aliases). This is very similar to how TypeInfo vs Instance interact, where a recursive class-based structure like class Node: value: int children: List[Node] can be represented in a tree-like manner. """ __slots__ = ("alias", "args", "type_ref") def __init__( self, alias: mypy.nodes.TypeAlias | None, args: list[Type], line: int = -1, column: int = -1, ) -> None: super().__init__(line, column) self.alias = alias self.args = args self.type_ref: str | None = None def _expand_once(self) -> Type: """Expand to the target type exactly once. This doesn't do full expansion, i.e. the result can contain another (or even this same) type alias. Use this internal helper only when really needed, its public wrapper mypy.types.get_proper_type() is preferred. """ assert self.alias is not None if self.alias.no_args: # We know that no_args=True aliases like L = List must have an instance # as their target. assert isinstance(self.alias.target, Instance) # type: ignore[misc] return self.alias.target.copy_modified(args=self.args) # TODO: this logic duplicates the one in expand_type_by_instance(). 
if self.alias.tvar_tuple_index is None: mapping = {v.id: s for (v, s) in zip(self.alias.alias_tvars, self.args)} else: prefix = self.alias.tvar_tuple_index suffix = len(self.alias.alias_tvars) - self.alias.tvar_tuple_index - 1 start, middle, end = split_with_prefix_and_suffix(tuple(self.args), prefix, suffix) tvar = self.alias.alias_tvars[prefix] assert isinstance(tvar, TypeVarTupleType) mapping = {tvar.id: TupleType(list(middle), tvar.tuple_fallback)} for tvar, sub in zip( self.alias.alias_tvars[:prefix] + self.alias.alias_tvars[prefix + 1 :], start + end ): mapping[tvar.id] = sub new_tp = self.alias.target.accept(InstantiateAliasVisitor(mapping)) new_tp.accept(LocationSetter(self.line, self.column)) new_tp.line = self.line new_tp.column = self.column return new_tp def _partial_expansion(self, nothing_args: bool = False) -> tuple[ProperType, bool]: # Private method mostly for debugging and testing. unroller = UnrollAliasVisitor(set(), {}) if nothing_args: alias = self.copy_modified(args=[UninhabitedType()] * len(self.args)) else: alias = self unrolled = alias.accept(unroller) assert isinstance(unrolled, ProperType) return unrolled, unroller.recursed def expand_all_if_possible(self, nothing_args: bool = False) -> ProperType | None: """Attempt a full expansion of the type alias (including nested aliases). If the expansion is not possible, i.e. the alias is (mutually-)recursive, return None. If nothing_args is True, replace all type arguments with an UninhabitedType() (used to detect recursively defined aliases). """ unrolled, recursed = self._partial_expansion(nothing_args=nothing_args) if recursed: return None return unrolled @property def is_recursive(self) -> bool: """Whether this type alias is recursive. Note this doesn't check generic alias arguments, but only if this alias *definition* is recursive. The property value thus can be cached on the underlying TypeAlias node. If you want to include all nested types, use has_recursive_types() function. """ assert self.alias is not None, "Unfixed type alias" is_recursive = self.alias._is_recursive if is_recursive is None: is_recursive = self.expand_all_if_possible(nothing_args=True) is None # We cache the value on the underlying TypeAlias node as an optimization, # since the value is the same for all instances of the same alias. self.alias._is_recursive = is_recursive return is_recursive def can_be_true_default(self) -> bool: if self.alias is not None: return self.alias.target.can_be_true return super().can_be_true_default() def can_be_false_default(self) -> bool: if self.alias is not None: return self.alias.target.can_be_false return super().can_be_false_default() def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_type_alias_type(self) def __hash__(self) -> int: return hash((self.alias, tuple(self.args))) def __eq__(self, other: object) -> bool: # Note: never use this to determine subtype relationships, use is_subtype(). 
if not isinstance(other, TypeAliasType): return NotImplemented return self.alias == other.alias and self.args == other.args def serialize(self) -> JsonDict: assert self.alias is not None data: JsonDict = { ".class": "TypeAliasType", "type_ref": self.alias.fullname, "args": [arg.serialize() for arg in self.args], } return data @classmethod def deserialize(cls, data: JsonDict) -> TypeAliasType: assert data[".class"] == "TypeAliasType" args: list[Type] = [] if "args" in data: args_list = data["args"] assert isinstance(args_list, list) args = [deserialize_type(arg) for arg in args_list] alias = TypeAliasType(None, args) alias.type_ref = data["type_ref"] return alias def copy_modified(self, *, args: list[Type] | None = None) -> TypeAliasType: return TypeAliasType( self.alias, args if args is not None else self.args.copy(), self.line, self.column ) class TypeGuardedType(Type): """Only used by find_isinstance_check() etc.""" __slots__ = ("type_guard",) def __init__(self, type_guard: Type) -> None: super().__init__(line=type_guard.line, column=type_guard.column) self.type_guard = type_guard def __repr__(self) -> str: return f"TypeGuard({self.type_guard})" class RequiredType(Type): """Required[T] or NotRequired[T]. Only usable at top-level of a TypedDict definition.""" def __init__(self, item: Type, *, required: bool) -> None: super().__init__(line=item.line, column=item.column) self.item = item self.required = required def __repr__(self) -> str: if self.required: return f"Required[{self.item}]" else: return f"NotRequired[{self.item}]" def accept(self, visitor: TypeVisitor[T]) -> T: return self.item.accept(visitor) class ReadOnlyType(Type): """ReadOnly[T] Only usable at top-level of a TypedDict definition.""" def __init__(self, item: Type) -> None: super().__init__(line=item.line, column=item.column) self.item = item def __repr__(self) -> str: return f"ReadOnly[{self.item}]" def accept(self, visitor: TypeVisitor[T]) -> T: return self.item.accept(visitor) class ProperType(Type): """Not a type alias. Every type except TypeAliasType must inherit from this type. """ __slots__ = () class TypeVarId: # A type variable is uniquely identified by its raw id and meta level. # For plain variables (type parameters of generic classes and # functions) raw ids are allocated by semantic analysis, using # positive ids 1, 2, ... for generic class parameters and negative # ids -1, ... for generic function type arguments. A special value 0 # is reserved for Self type variable (autogenerated). This convention # is only used to keep type variable ids distinct when allocating # them; the type checker makes no distinction between class and # function type variables. # Metavariables are allocated unique ids starting from 1. raw_id: int # Level of the variable in type inference. Currently either 0 for # declared types, or 1 for type inference metavariables. meta_level: int = 0 # Class variable used for allocating fresh ids for metavariables. 
next_raw_id: ClassVar[int] = 1 # Fullname of class or function/method which declares this type # variable (not the fullname of the TypeVar definition!), or '' namespace: str def __init__(self, raw_id: int, meta_level: int = 0, *, namespace: str = "") -> None: self.raw_id = raw_id self.meta_level = meta_level self.namespace = namespace @staticmethod def new(meta_level: int) -> TypeVarId: raw_id = TypeVarId.next_raw_id TypeVarId.next_raw_id += 1 return TypeVarId(raw_id, meta_level) def __repr__(self) -> str: return self.raw_id.__repr__() def __eq__(self, other: object) -> bool: return ( isinstance(other, TypeVarId) and self.raw_id == other.raw_id and self.meta_level == other.meta_level and self.namespace == other.namespace ) def __ne__(self, other: object) -> bool: return not (self == other) def __hash__(self) -> int: return hash((self.raw_id, self.meta_level, self.namespace)) def is_meta_var(self) -> bool: return self.meta_level > 0 def is_self(self) -> bool: # This is a special value indicating typing.Self variable. return self.raw_id == 0 class TypeVarLikeType(ProperType): __slots__ = ("name", "fullname", "id", "upper_bound", "default") name: str # Name (may be qualified) fullname: str # Fully qualified name id: TypeVarId upper_bound: Type default: Type def __init__( self, name: str, fullname: str, id: TypeVarId, upper_bound: Type, default: Type, line: int = -1, column: int = -1, ) -> None: super().__init__(line, column) self.name = name self.fullname = fullname self.id = id self.upper_bound = upper_bound self.default = default def serialize(self) -> JsonDict: raise NotImplementedError @classmethod def deserialize(cls, data: JsonDict) -> TypeVarLikeType: raise NotImplementedError def copy_modified(self, *, id: TypeVarId, **kwargs: Any) -> Self: raise NotImplementedError @classmethod def new_unification_variable(cls, old: Self) -> Self: new_id = TypeVarId.new(meta_level=1) return old.copy_modified(id=new_id) def has_default(self) -> bool: t = get_proper_type(self.default) return not (isinstance(t, AnyType) and t.type_of_any == TypeOfAny.from_omitted_generics) class TypeVarType(TypeVarLikeType): """Type that refers to a type variable.""" __slots__ = ("values", "variance") values: list[Type] # Value restriction, empty list if no restriction variance: int def __init__( self, name: str, fullname: str, id: TypeVarId, values: list[Type], upper_bound: Type, default: Type, variance: int = INVARIANT, line: int = -1, column: int = -1, ) -> None: super().__init__(name, fullname, id, upper_bound, default, line, column) assert values is not None, "No restrictions must be represented by empty list" self.values = values self.variance = variance def copy_modified( self, *, values: Bogus[list[Type]] = _dummy, upper_bound: Bogus[Type] = _dummy, default: Bogus[Type] = _dummy, id: Bogus[TypeVarId] = _dummy, line: int = _dummy_int, column: int = _dummy_int, **kwargs: Any, ) -> TypeVarType: return TypeVarType( name=self.name, fullname=self.fullname, id=self.id if id is _dummy else id, values=self.values if values is _dummy else values, upper_bound=self.upper_bound if upper_bound is _dummy else upper_bound, default=self.default if default is _dummy else default, variance=self.variance, line=self.line if line == _dummy_int else line, column=self.column if column == _dummy_int else column, ) def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_type_var(self) def __hash__(self) -> int: return hash((self.id, self.upper_bound, tuple(self.values))) def __eq__(self, other: object) -> bool: if not 
isinstance(other, TypeVarType): return NotImplemented return ( self.id == other.id and self.upper_bound == other.upper_bound and self.values == other.values ) def serialize(self) -> JsonDict: assert not self.id.is_meta_var() return { ".class": "TypeVarType", "name": self.name, "fullname": self.fullname, "id": self.id.raw_id, "namespace": self.id.namespace, "values": [v.serialize() for v in self.values], "upper_bound": self.upper_bound.serialize(), "default": self.default.serialize(), "variance": self.variance, } @classmethod def deserialize(cls, data: JsonDict) -> TypeVarType: assert data[".class"] == "TypeVarType" return TypeVarType( name=data["name"], fullname=data["fullname"], id=TypeVarId(data["id"], namespace=data["namespace"]), values=[deserialize_type(v) for v in data["values"]], upper_bound=deserialize_type(data["upper_bound"]), default=deserialize_type(data["default"]), variance=data["variance"], ) class ParamSpecFlavor: # Simple ParamSpec reference such as "P" BARE: Final = 0 # P.args ARGS: Final = 1 # P.kwargs KWARGS: Final = 2 class ParamSpecType(TypeVarLikeType): """Type that refers to a ParamSpec. A ParamSpec is a type variable that represents the parameter types, names and kinds of a callable (i.e., the signature without the return type). This can be one of these forms * P (ParamSpecFlavor.BARE) * P.args (ParamSpecFlavor.ARGS) * P.kwargs (ParamSpecFlavor.KWARGS) The upper_bound is really used as a fallback type -- it's shared with TypeVarType for simplicity. It can't be specified by the user and the value is directly derived from the flavor (currently always just 'object'). """ __slots__ = ("flavor", "prefix") flavor: int prefix: Parameters def __init__( self, name: str, fullname: str, id: TypeVarId, flavor: int, upper_bound: Type, default: Type, *, line: int = -1, column: int = -1, prefix: Parameters | None = None, ) -> None: super().__init__(name, fullname, id, upper_bound, default, line=line, column=column) self.flavor = flavor self.prefix = prefix or Parameters([], [], []) def with_flavor(self, flavor: int) -> ParamSpecType: return ParamSpecType( self.name, self.fullname, self.id, flavor, upper_bound=self.upper_bound, default=self.default, prefix=self.prefix, ) def copy_modified( self, *, id: Bogus[TypeVarId] = _dummy, flavor: int = _dummy_int, prefix: Bogus[Parameters] = _dummy, default: Bogus[Type] = _dummy, **kwargs: Any, ) -> ParamSpecType: return ParamSpecType( self.name, self.fullname, id if id is not _dummy else self.id, flavor if flavor != _dummy_int else self.flavor, self.upper_bound, default=default if default is not _dummy else self.default, line=self.line, column=self.column, prefix=prefix if prefix is not _dummy else self.prefix, ) def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_param_spec(self) def name_with_suffix(self) -> str: n = self.name if self.flavor == ParamSpecFlavor.ARGS: return f"{n}.args" elif self.flavor == ParamSpecFlavor.KWARGS: return f"{n}.kwargs" return n def __hash__(self) -> int: return hash((self.id, self.flavor, self.prefix)) def __eq__(self, other: object) -> bool: if not isinstance(other, ParamSpecType): return NotImplemented # Upper bound can be ignored, since it's determined by flavor.
return self.id == other.id and self.flavor == other.flavor and self.prefix == other.prefix def serialize(self) -> JsonDict: assert not self.id.is_meta_var() return { ".class": "ParamSpecType", "name": self.name, "fullname": self.fullname, "id": self.id.raw_id, "namespace": self.id.namespace, "flavor": self.flavor, "upper_bound": self.upper_bound.serialize(), "default": self.default.serialize(), "prefix": self.prefix.serialize(), } @classmethod def deserialize(cls, data: JsonDict) -> ParamSpecType: assert data[".class"] == "ParamSpecType" return ParamSpecType( data["name"], data["fullname"], TypeVarId(data["id"], namespace=data["namespace"]), data["flavor"], deserialize_type(data["upper_bound"]), deserialize_type(data["default"]), prefix=Parameters.deserialize(data["prefix"]), ) class TypeVarTupleType(TypeVarLikeType): """Type that refers to a TypeVarTuple. See PEP646 for more information. """ __slots__ = ("tuple_fallback", "min_len") def __init__( self, name: str, fullname: str, id: TypeVarId, upper_bound: Type, tuple_fallback: Instance, default: Type, *, line: int = -1, column: int = -1, min_len: int = 0, ) -> None: super().__init__(name, fullname, id, upper_bound, default, line=line, column=column) self.tuple_fallback = tuple_fallback # This value is not settable by a user. It is an internal-only thing to support # len()-narrowing of variadic tuples. self.min_len = min_len def serialize(self) -> JsonDict: assert not self.id.is_meta_var() return { ".class": "TypeVarTupleType", "name": self.name, "fullname": self.fullname, "id": self.id.raw_id, "namespace": self.id.namespace, "upper_bound": self.upper_bound.serialize(), "tuple_fallback": self.tuple_fallback.serialize(), "default": self.default.serialize(), "min_len": self.min_len, } @classmethod def deserialize(cls, data: JsonDict) -> TypeVarTupleType: assert data[".class"] == "TypeVarTupleType" return TypeVarTupleType( data["name"], data["fullname"], TypeVarId(data["id"], namespace=data["namespace"]), deserialize_type(data["upper_bound"]), Instance.deserialize(data["tuple_fallback"]), deserialize_type(data["default"]), min_len=data["min_len"], ) def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_type_var_tuple(self) def __hash__(self) -> int: return hash((self.id, self.min_len)) def __eq__(self, other: object) -> bool: if not isinstance(other, TypeVarTupleType): return NotImplemented return self.id == other.id and self.min_len == other.min_len def copy_modified( self, *, id: Bogus[TypeVarId] = _dummy, upper_bound: Bogus[Type] = _dummy, default: Bogus[Type] = _dummy, min_len: Bogus[int] = _dummy, **kwargs: Any, ) -> TypeVarTupleType: return TypeVarTupleType( self.name, self.fullname, self.id if id is _dummy else id, self.upper_bound if upper_bound is _dummy else upper_bound, self.tuple_fallback, self.default if default is _dummy else default, line=self.line, column=self.column, min_len=self.min_len if min_len is _dummy else min_len, ) class UnboundType(ProperType): """Instance type that has not been bound during semantic analysis.""" __slots__ = ( "name", "args", "optional", "empty_tuple_index", "original_str_expr", "original_str_fallback", ) def __init__( self, name: str, args: Sequence[Type] | None = None, line: int = -1, column: int = -1, optional: bool = False, empty_tuple_index: bool = False, original_str_expr: str | None = None, original_str_fallback: str | None = None, ) -> None: super().__init__(line, column) if not args: args = [] self.name = name self.args = tuple(args) # Should this type be wrapped in an 
Optional?
        self.optional = optional
        # Special case for X[()]
        self.empty_tuple_index = empty_tuple_index
        # If this UnboundType was originally defined as a str or bytes, keep track of
        # the original contents of that string-like thing. This way, if this UnboundType
        # ever shows up inside of a LiteralType, we can determine whether that
        # Literal[...] is valid or not. E.g. Literal[foo] is most likely invalid
        # (unless 'foo' is an alias for another literal or something) and
        # Literal["foo"] most likely is.
        #
        # We keep track of the entire string instead of just using a boolean flag
        # so we can distinguish between things like Literal["foo"] vs
        # Literal[" foo "].
        #
        # We also keep track of what the original base fallback type was supposed to be
        # so we don't have to try to recompute it later.
        self.original_str_expr = original_str_expr
        self.original_str_fallback = original_str_fallback

    def copy_modified(self, args: Bogus[Sequence[Type] | None] = _dummy) -> UnboundType:
        if args is _dummy:
            args = self.args
        return UnboundType(
            name=self.name,
            args=args,
            line=self.line,
            column=self.column,
            optional=self.optional,
            empty_tuple_index=self.empty_tuple_index,
            original_str_expr=self.original_str_expr,
            original_str_fallback=self.original_str_fallback,
        )

    def accept(self, visitor: TypeVisitor[T]) -> T:
        return visitor.visit_unbound_type(self)

    def __hash__(self) -> int:
        return hash((self.name, self.optional, tuple(self.args), self.original_str_expr))

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, UnboundType):
            return NotImplemented
        return (
            self.name == other.name
            and self.optional == other.optional
            and self.args == other.args
            and self.original_str_expr == other.original_str_expr
            and self.original_str_fallback == other.original_str_fallback
        )

    def serialize(self) -> JsonDict:
        return {
            ".class": "UnboundType",
            "name": self.name,
            "args": [a.serialize() for a in self.args],
            "expr": self.original_str_expr,
            "expr_fallback": self.original_str_fallback,
        }

    @classmethod
    def deserialize(cls, data: JsonDict) -> UnboundType:
        assert data[".class"] == "UnboundType"
        return UnboundType(
            data["name"],
            [deserialize_type(a) for a in data["args"]],
            original_str_expr=data["expr"],
            original_str_fallback=data["expr_fallback"],
        )


class CallableArgument(ProperType):
    """Represents an Arg(type, 'name') inside a Callable's type list.

    Note that this is a synthetic type for helping parse ASTs, not a real type.
    """

    __slots__ = ("typ", "name", "constructor")

    typ: Type
    name: str | None
    constructor: str | None

    def __init__(
        self,
        typ: Type,
        name: str | None,
        constructor: str | None,
        line: int = -1,
        column: int = -1,
    ) -> None:
        super().__init__(line, column)
        self.typ = typ
        self.name = name
        self.constructor = constructor

    def accept(self, visitor: TypeVisitor[T]) -> T:
        assert isinstance(visitor, SyntheticTypeVisitor)
        ret: T = visitor.visit_callable_argument(self)
        return ret

    def serialize(self) -> JsonDict:
        assert False, "Synthetic types don't serialize"


class TypeList(ProperType):
    """Information about argument types and names [...].

    This is used for the arguments of a Callable type, i.e. for
    [arg, ...] in Callable[[arg, ...], ret]. This is not a real type
    but a syntactic AST construct. UnboundTypes can also have TypeList
    types before they are processed into Callable types.
""" __slots__ = ("items",) items: list[Type] def __init__(self, items: list[Type], line: int = -1, column: int = -1) -> None: super().__init__(line, column) self.items = items def accept(self, visitor: TypeVisitor[T]) -> T: assert isinstance(visitor, SyntheticTypeVisitor) ret: T = visitor.visit_type_list(self) return ret def serialize(self) -> JsonDict: assert False, "Synthetic types don't serialize" def __hash__(self) -> int: return hash(tuple(self.items)) def __eq__(self, other: object) -> bool: return isinstance(other, TypeList) and self.items == other.items class UnpackType(ProperType): """Type operator Unpack from PEP646. Can be either with Unpack[] or unpacking * syntax. The inner type should be either a TypeVarTuple, or a variable length tuple. In an exceptional case of callable star argument it can be a fixed length tuple. Note: the above restrictions are only guaranteed by normalizations after semantic analysis, if your code needs to handle UnpackType *during* semantic analysis, it is wild west, technically anything can be present in the wrapped type. """ __slots__ = ["type", "from_star_syntax"] def __init__( self, typ: Type, line: int = -1, column: int = -1, from_star_syntax: bool = False ) -> None: super().__init__(line, column) self.type = typ self.from_star_syntax = from_star_syntax def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_unpack_type(self) def serialize(self) -> JsonDict: return {".class": "UnpackType", "type": self.type.serialize()} @classmethod def deserialize(cls, data: JsonDict) -> UnpackType: assert data[".class"] == "UnpackType" typ = data["type"] return UnpackType(deserialize_type(typ)) def __hash__(self) -> int: return hash(self.type) def __eq__(self, other: object) -> bool: return isinstance(other, UnpackType) and self.type == other.type class AnyType(ProperType): """The type 'Any'.""" __slots__ = ("type_of_any", "source_any", "missing_import_name") def __init__( self, type_of_any: int, source_any: AnyType | None = None, missing_import_name: str | None = None, line: int = -1, column: int = -1, ) -> None: super().__init__(line, column) self.type_of_any = type_of_any # If this Any was created as a result of interacting with another 'Any', record the source # and use it in reports. self.source_any = source_any if source_any and source_any.source_any: self.source_any = source_any.source_any if source_any is None: self.missing_import_name = missing_import_name else: self.missing_import_name = source_any.missing_import_name # Only unimported type anys and anys from other anys should have an import name assert missing_import_name is None or type_of_any in ( TypeOfAny.from_unimported_type, TypeOfAny.from_another_any, ) # Only Anys that come from another Any can have source_any. assert type_of_any != TypeOfAny.from_another_any or source_any is not None # We should not have chains of Anys. 
assert not self.source_any or self.source_any.type_of_any != TypeOfAny.from_another_any @property def is_from_error(self) -> bool: return self.type_of_any == TypeOfAny.from_error def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_any(self) def copy_modified( self, # Mark with Bogus because _dummy is just an object (with type Any) type_of_any: int = _dummy_int, original_any: Bogus[AnyType | None] = _dummy, missing_import_name: Bogus[str | None] = _dummy, ) -> AnyType: if type_of_any == _dummy_int: type_of_any = self.type_of_any if original_any is _dummy: original_any = self.source_any if missing_import_name is _dummy: missing_import_name = self.missing_import_name return AnyType( type_of_any=type_of_any, source_any=original_any, missing_import_name=missing_import_name, line=self.line, column=self.column, ) def __hash__(self) -> int: return hash(AnyType) def __eq__(self, other: object) -> bool: return isinstance(other, AnyType) def serialize(self) -> JsonDict: return { ".class": "AnyType", "type_of_any": self.type_of_any, "source_any": self.source_any.serialize() if self.source_any is not None else None, "missing_import_name": self.missing_import_name, } @classmethod def deserialize(cls, data: JsonDict) -> AnyType: assert data[".class"] == "AnyType" source = data["source_any"] return AnyType( data["type_of_any"], AnyType.deserialize(source) if source is not None else None, data["missing_import_name"], ) class UninhabitedType(ProperType): """This type has no members. This type is the bottom type. With strict Optional checking, it is the only common subtype between all other types, which allows `meet` to be well defined. Without strict Optional checking, NoneType fills this role. In general, for any type T: join(UninhabitedType, T) = T meet(UninhabitedType, T) = UninhabitedType is_subtype(UninhabitedType, T) = True """ __slots__ = ("ambiguous",) ambiguous: bool # Is this a result of inference for a variable without constraints? def __init__(self, line: int = -1, column: int = -1) -> None: super().__init__(line, column) self.ambiguous = False def can_be_true_default(self) -> bool: return False def can_be_false_default(self) -> bool: return False def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_uninhabited_type(self) def __hash__(self) -> int: return hash(UninhabitedType) def __eq__(self, other: object) -> bool: return isinstance(other, UninhabitedType) def serialize(self) -> JsonDict: return {".class": "UninhabitedType"} @classmethod def deserialize(cls, data: JsonDict) -> UninhabitedType: assert data[".class"] == "UninhabitedType" return UninhabitedType() class NoneType(ProperType): """The type of 'None'. This type can be written by users as 'None'. """ __slots__ = () def __init__(self, line: int = -1, column: int = -1) -> None: super().__init__(line, column) def can_be_true_default(self) -> bool: return False def __hash__(self) -> int: return hash(NoneType) def __eq__(self, other: object) -> bool: return isinstance(other, NoneType) def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_none_type(self) def serialize(self) -> JsonDict: return {".class": "NoneType"} @classmethod def deserialize(cls, data: JsonDict) -> NoneType: assert data[".class"] == "NoneType" return NoneType() def is_singleton_type(self) -> bool: return True # NoneType used to be called NoneTyp so to avoid needlessly breaking # external plugins we keep that alias here. NoneTyp = NoneType class ErasedType(ProperType): """Placeholder for an erased type. 
This is used during type inference. This has the special property that it is ignored during type inference. """ __slots__ = () def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_erased_type(self) class DeletedType(ProperType): """Type of deleted variables. These can be used as lvalues but not rvalues. """ __slots__ = ("source",) source: str | None # May be None; name that generated this value def __init__(self, source: str | None = None, line: int = -1, column: int = -1) -> None: super().__init__(line, column) self.source = source def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_deleted_type(self) def serialize(self) -> JsonDict: return {".class": "DeletedType", "source": self.source} @classmethod def deserialize(cls, data: JsonDict) -> DeletedType: assert data[".class"] == "DeletedType" return DeletedType(data["source"]) # Fake TypeInfo to be used as a placeholder during Instance de-serialization. NOT_READY: Final = mypy.nodes.FakeInfo("De-serialization failure: TypeInfo not fixed") class ExtraAttrs: """Summary of module attributes and types. This is used for instances of types.ModuleType, because they can have different attributes per instance, and for type narrowing with hasattr() checks. """ def __init__( self, attrs: dict[str, Type], immutable: set[str] | None = None, mod_name: str | None = None, ) -> None: self.attrs = attrs if immutable is None: immutable = set() self.immutable = immutable self.mod_name = mod_name def __hash__(self) -> int: return hash((tuple(self.attrs.items()), tuple(sorted(self.immutable)))) def __eq__(self, other: object) -> bool: if not isinstance(other, ExtraAttrs): return NotImplemented return self.attrs == other.attrs and self.immutable == other.immutable def copy(self) -> ExtraAttrs: return ExtraAttrs(self.attrs.copy(), self.immutable.copy(), self.mod_name) def __repr__(self) -> str: return f"ExtraAttrs({self.attrs!r}, {self.immutable!r}, {self.mod_name!r})" def serialize(self) -> JsonDict: return { ".class": "ExtraAttrs", "attrs": {k: v.serialize() for k, v in self.attrs.items()}, "immutable": list(self.immutable), "mod_name": self.mod_name, } @classmethod def deserialize(cls, data: JsonDict) -> ExtraAttrs: assert data[".class"] == "ExtraAttrs" return ExtraAttrs( {k: deserialize_type(v) for k, v in data["attrs"].items()}, set(data["immutable"]), data["mod_name"], ) class Instance(ProperType): """An instance type of form C[T1, ..., Tn]. The list of type variables may be empty. Several types have fallbacks to `Instance`, because in Python everything is an object and this concept is impossible to express without intersection types. We therefore use fallbacks for all "non-special" (like UninhabitedType, ErasedType etc) types. """ __slots__ = ("type", "args", "invalid", "type_ref", "last_known_value", "_hash", "extra_attrs") def __init__( self, typ: mypy.nodes.TypeInfo, args: Sequence[Type], line: int = -1, column: int = -1, *, last_known_value: LiteralType | None = None, extra_attrs: ExtraAttrs | None = None, ) -> None: super().__init__(line, column) self.type = typ self.args = tuple(args) self.type_ref: str | None = None # True if recovered after incorrect number of type arguments error self.invalid = False # This field keeps track of the underlying Literal[...] value associated with # this instance, if one is known. # # This field is set whenever possible within expressions, but is erased upon # variable assignment (see erasetype.remove_instance_last_known_values) unless # the variable is declared to be final. 
# # For example, consider the following program: # # a = 1 # b: Final[int] = 2 # c: Final = 3 # print(a + b + c + 4) # # The 'Instance' objects associated with the expressions '1', '2', '3', and '4' will # have last_known_values of type Literal[1], Literal[2], Literal[3], and Literal[4] # respectively. However, the Instance object assigned to 'a' and 'b' will have their # last_known_value erased: variable 'a' is mutable; variable 'b' was declared to be # specifically an int. # # Or more broadly, this field lets this Instance "remember" its original declaration # when applicable. We want this behavior because we want implicit Final declarations # to act pretty much identically with constants: we should be able to replace any # places where we use some Final variable with the original value and get the same # type-checking behavior. For example, we want this program: # # def expects_literal(x: Literal[3]) -> None: pass # var: Final = 3 # expects_literal(var) # # ...to type-check in the exact same way as if we had written the program like this: # # def expects_literal(x: Literal[3]) -> None: pass # expects_literal(3) # # In order to make this work (especially with literal types), we need var's type # (an Instance) to remember the "original" value. # # Preserving this value within expressions is useful for similar reasons. # # Currently most of mypy will ignore this field and will continue to treat this type like # a regular Instance. We end up using this field only when we are explicitly within a # Literal context. self.last_known_value = last_known_value # Cached hash value self._hash = -1 # Additional attributes defined per instance of this type. For example modules # have different attributes per instance of types.ModuleType. self.extra_attrs = extra_attrs def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_instance(self) def __hash__(self) -> int: if self._hash == -1: self._hash = hash((self.type, self.args, self.last_known_value, self.extra_attrs)) return self._hash def __eq__(self, other: object) -> bool: if not isinstance(other, Instance): return NotImplemented return ( self.type == other.type and self.args == other.args and self.last_known_value == other.last_known_value and self.extra_attrs == other.extra_attrs ) def serialize(self) -> JsonDict | str: assert self.type is not None type_ref = self.type.fullname if not self.args and not self.last_known_value: return type_ref data: JsonDict = {".class": "Instance"} data["type_ref"] = type_ref data["args"] = [arg.serialize() for arg in self.args] if self.last_known_value is not None: data["last_known_value"] = self.last_known_value.serialize() data["extra_attrs"] = self.extra_attrs.serialize() if self.extra_attrs else None return data @classmethod def deserialize(cls, data: JsonDict | str) -> Instance: if isinstance(data, str): inst = Instance(NOT_READY, []) inst.type_ref = data return inst assert data[".class"] == "Instance" args: list[Type] = [] if "args" in data: args_list = data["args"] assert isinstance(args_list, list) args = [deserialize_type(arg) for arg in args_list] inst = Instance(NOT_READY, args) inst.type_ref = data["type_ref"] # Will be fixed up by fixup.py later. 
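        # Hedged round-trip sketch (data shapes illustrative): a bare string like
        # "builtins.str" is handled by the isinstance(data, str) branch above,
        # while a mapping such as
        #     {".class": "Instance", "type_ref": "builtins.list", "args": [...]}
        # takes this path; either way type_ref is resolved later by fixup.py.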
if "last_known_value" in data: inst.last_known_value = LiteralType.deserialize(data["last_known_value"]) if data.get("extra_attrs") is not None: inst.extra_attrs = ExtraAttrs.deserialize(data["extra_attrs"]) return inst def copy_modified( self, *, args: Bogus[list[Type]] = _dummy, last_known_value: Bogus[LiteralType | None] = _dummy, ) -> Instance: new = Instance( typ=self.type, args=args if args is not _dummy else self.args, line=self.line, column=self.column, last_known_value=( last_known_value if last_known_value is not _dummy else self.last_known_value ), extra_attrs=self.extra_attrs, ) # We intentionally don't copy the extra_attrs here, so they will be erased. new.can_be_true = self.can_be_true new.can_be_false = self.can_be_false return new def copy_with_extra_attr(self, name: str, typ: Type) -> Instance: if self.extra_attrs: existing_attrs = self.extra_attrs.copy() else: existing_attrs = ExtraAttrs({}, set(), None) existing_attrs.attrs[name] = typ new = self.copy_modified() new.extra_attrs = existing_attrs return new def is_singleton_type(self) -> bool: # TODO: # Also make this return True if the type corresponds to NotImplemented? return ( self.type.is_enum and len(self.get_enum_values()) == 1 or self.type.fullname in {"builtins.ellipsis", "types.EllipsisType"} ) def get_enum_values(self) -> list[str]: """Return the list of values for an Enum.""" return [ name for name, sym in self.type.names.items() if isinstance(sym.node, mypy.nodes.Var) ] class FunctionLike(ProperType): """Abstract base class for function types.""" __slots__ = ("fallback",) fallback: Instance def __init__(self, line: int = -1, column: int = -1) -> None: super().__init__(line, column) self._can_be_false = False @abstractmethod def is_type_obj(self) -> bool: pass @abstractmethod def type_object(self) -> mypy.nodes.TypeInfo: pass @property @abstractmethod def items(self) -> list[CallableType]: pass @abstractmethod def with_name(self, name: str) -> FunctionLike: pass @abstractmethod def get_name(self) -> str | None: pass class FormalArgument(NamedTuple): name: str | None pos: int | None typ: Type required: bool class Parameters(ProperType): """Type that represents the parameters to a function. Used for ParamSpec analysis. Note that by convention we handle this type as a Callable without return type, not as a "tuple with names", so that it behaves contravariantly, in particular [x: int] <: [int]. """ __slots__ = ( "arg_types", "arg_kinds", "arg_names", "min_args", "is_ellipsis_args", # TODO: variables don't really belong here, but they are used to allow hacky support # for forall . 
Foo[[x: T], T] by capturing a generic callable with ParamSpec, see #15909
        "variables",
        "imprecise_arg_kinds",
    )

    def __init__(
        self,
        arg_types: Sequence[Type],
        arg_kinds: list[ArgKind],
        arg_names: Sequence[str | None],
        *,
        variables: Sequence[TypeVarLikeType] | None = None,
        is_ellipsis_args: bool = False,
        imprecise_arg_kinds: bool = False,
        line: int = -1,
        column: int = -1,
    ) -> None:
        super().__init__(line, column)
        self.arg_types = list(arg_types)
        self.arg_kinds = arg_kinds
        self.arg_names = list(arg_names)
        assert len(arg_types) == len(arg_kinds) == len(arg_names)
        assert not any(isinstance(t, Parameters) for t in arg_types)
        self.min_args = arg_kinds.count(ARG_POS)
        self.is_ellipsis_args = is_ellipsis_args
        self.variables = variables or []
        self.imprecise_arg_kinds = imprecise_arg_kinds

    def copy_modified(
        self,
        arg_types: Bogus[Sequence[Type]] = _dummy,
        arg_kinds: Bogus[list[ArgKind]] = _dummy,
        arg_names: Bogus[Sequence[str | None]] = _dummy,
        *,
        variables: Bogus[Sequence[TypeVarLikeType]] = _dummy,
        is_ellipsis_args: Bogus[bool] = _dummy,
        imprecise_arg_kinds: Bogus[bool] = _dummy,
    ) -> Parameters:
        return Parameters(
            arg_types=arg_types if arg_types is not _dummy else self.arg_types,
            arg_kinds=arg_kinds if arg_kinds is not _dummy else self.arg_kinds,
            arg_names=arg_names if arg_names is not _dummy else self.arg_names,
            is_ellipsis_args=(
                is_ellipsis_args if is_ellipsis_args is not _dummy else self.is_ellipsis_args
            ),
            variables=variables if variables is not _dummy else self.variables,
            imprecise_arg_kinds=(
                imprecise_arg_kinds
                if imprecise_arg_kinds is not _dummy
                else self.imprecise_arg_kinds
            ),
        )

    # TODO: there is a lot of code duplication with Callable type, fix this.
    def var_arg(self) -> FormalArgument | None:
        """The formal argument for *args."""
        for position, (type, kind) in enumerate(zip(self.arg_types, self.arg_kinds)):
            if kind == ARG_STAR:
                return FormalArgument(None, position, type, False)
        return None

    def kw_arg(self) -> FormalArgument | None:
        """The formal argument for **kwargs."""
        for position, (type, kind) in enumerate(zip(self.arg_types, self.arg_kinds)):
            if kind == ARG_STAR2:
                return FormalArgument(None, position, type, False)
        return None

    def formal_arguments(self, include_star_args: bool = False) -> list[FormalArgument]:
        """Return a list of the formal arguments of this callable, ignoring *args and **kwargs.

        To handle *args and **kwargs, use the 'var_arg' and 'kw_arg' methods, if they
        do not return None.

        If you really want to include star args in the returned output, set the
        'include_star_args' parameter to 'True'."""
        args = []
        done_with_positional = False
        for i in range(len(self.arg_types)):
            kind = self.arg_kinds[i]
            if kind.is_named() or kind.is_star():
                done_with_positional = True
            if not include_star_args and kind.is_star():
                continue

            required = kind.is_required()
            pos = None if done_with_positional else i
            arg = FormalArgument(self.arg_names[i], pos, self.arg_types[i], required)
            args.append(arg)
        return args

    def argument_by_name(self, name: str | None) -> FormalArgument | None:
        if name is None:
            return None
        seen_star = False
        for i, (arg_name, kind, typ) in enumerate(
            zip(self.arg_names, self.arg_kinds, self.arg_types)
        ):
            # No more positional arguments after these.
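            # Hedged sketch: for parameters (x: int, *args: str, y: bool),
            # argument_by_name("x") reports position 0, while
            # argument_by_name("y") reports position None, because 'y' is
            # keyword-only once the star argument has been seen.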
if kind.is_named() or kind.is_star(): seen_star = True if kind.is_star(): continue if arg_name == name: position = None if seen_star else i return FormalArgument(name, position, typ, kind.is_required()) return self.try_synthesizing_arg_from_kwarg(name) def argument_by_position(self, position: int | None) -> FormalArgument | None: if position is None: return None if position >= len(self.arg_names): return self.try_synthesizing_arg_from_vararg(position) name, kind, typ = ( self.arg_names[position], self.arg_kinds[position], self.arg_types[position], ) if kind.is_positional(): return FormalArgument(name, position, typ, kind == ARG_POS) else: return self.try_synthesizing_arg_from_vararg(position) def try_synthesizing_arg_from_kwarg(self, name: str | None) -> FormalArgument | None: kw_arg = self.kw_arg() if kw_arg is not None: return FormalArgument(name, None, kw_arg.typ, False) else: return None def try_synthesizing_arg_from_vararg(self, position: int | None) -> FormalArgument | None: var_arg = self.var_arg() if var_arg is not None: return FormalArgument(None, position, var_arg.typ, False) else: return None def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_parameters(self) def serialize(self) -> JsonDict: return { ".class": "Parameters", "arg_types": [t.serialize() for t in self.arg_types], "arg_kinds": [int(x.value) for x in self.arg_kinds], "arg_names": self.arg_names, "variables": [tv.serialize() for tv in self.variables], "imprecise_arg_kinds": self.imprecise_arg_kinds, } @classmethod def deserialize(cls, data: JsonDict) -> Parameters: assert data[".class"] == "Parameters" return Parameters( [deserialize_type(t) for t in data["arg_types"]], [ArgKind(x) for x in data["arg_kinds"]], data["arg_names"], variables=[cast(TypeVarLikeType, deserialize_type(v)) for v in data["variables"]], imprecise_arg_kinds=data["imprecise_arg_kinds"], ) def __hash__(self) -> int: return hash( ( self.is_ellipsis_args, tuple(self.arg_types), tuple(self.arg_names), tuple(self.arg_kinds), ) ) def __eq__(self, other: object) -> bool: if isinstance(other, (Parameters, CallableType)): return ( self.arg_types == other.arg_types and self.arg_names == other.arg_names and self.arg_kinds == other.arg_kinds and self.is_ellipsis_args == other.is_ellipsis_args ) else: return NotImplemented CT = TypeVar("CT", bound="CallableType") class CallableType(FunctionLike): """Type of a non-overloaded callable object (such as function).""" __slots__ = ( "arg_types", # Types of function arguments "arg_kinds", # ARG_ constants "arg_names", # Argument names; None if not a keyword argument "min_args", # Minimum number of arguments; derived from arg_kinds "ret_type", # Return value type "name", # Name (may be None; for error messages and plugins) "definition", # For error messages. May be None. "variables", # Type variables for a generic function "is_ellipsis_args", # Is this Callable[..., t] (with literal '...')? "implicit", # Was this type implicitly generated instead of explicitly # specified by the user? "special_sig", # Non-None for signatures that require special handling # (currently only value is 'dict' for a signature similar to # 'dict') "from_type_type", # Was this callable generated by analyzing Type[...] # instantiation? "bound_args", # Bound type args, mostly unused but may be useful for # tools that consume mypy ASTs "def_extras", # Information about original definition we want to serialize. # This is used for more detailed error messages. "type_guard", # T, if -> TypeGuard[T] (ret_type is bool in this case). 
"type_is", # T, if -> TypeIs[T] (ret_type is bool in this case). "from_concatenate", # whether this callable is from a concatenate object # (this is used for error messages) "imprecise_arg_kinds", "unpack_kwargs", # Was an Unpack[...] with **kwargs used to define this callable? ) def __init__( self, # maybe this should be refactored to take a Parameters object arg_types: Sequence[Type], arg_kinds: list[ArgKind], arg_names: Sequence[str | None], ret_type: Type, fallback: Instance, name: str | None = None, definition: SymbolNode | None = None, variables: Sequence[TypeVarLikeType] | None = None, line: int = -1, column: int = -1, is_ellipsis_args: bool = False, implicit: bool = False, special_sig: str | None = None, from_type_type: bool = False, bound_args: Sequence[Type | None] = (), def_extras: dict[str, Any] | None = None, type_guard: Type | None = None, type_is: Type | None = None, from_concatenate: bool = False, imprecise_arg_kinds: bool = False, unpack_kwargs: bool = False, ) -> None: super().__init__(line, column) assert len(arg_types) == len(arg_kinds) == len(arg_names) for t, k in zip(arg_types, arg_kinds): if isinstance(t, ParamSpecType): assert not t.prefix.arg_types # TODO: should we assert that only ARG_STAR contain ParamSpecType? # See testParamSpecJoin, that relies on passing e.g `P.args` as plain argument. if variables is None: variables = [] self.arg_types = list(arg_types) self.arg_kinds = arg_kinds self.arg_names = list(arg_names) self.min_args = arg_kinds.count(ARG_POS) self.ret_type = ret_type self.fallback = fallback assert not name or "<bound method" not in name self.name = name self.definition = definition self.variables = variables self.is_ellipsis_args = is_ellipsis_args self.implicit = implicit self.special_sig = special_sig self.from_type_type = from_type_type self.from_concatenate = from_concatenate self.imprecise_arg_kinds = imprecise_arg_kinds if not bound_args: bound_args = () self.bound_args = bound_args if def_extras: self.def_extras = def_extras elif isinstance(definition, FuncDef): # This information would be lost if we don't have definition # after serialization, but it is useful in error messages. # TODO: decide how to add more info here (file, line, column) # without changing interface hash. 
first_arg: str | None = None if definition.arg_names and definition.info and not definition.is_static: if getattr(definition, "arguments", None): first_arg = definition.arguments[0].variable.name else: first_arg = definition.arg_names[0] self.def_extras = {"first_arg": first_arg} else: self.def_extras = {} self.type_guard = type_guard self.type_is = type_is self.unpack_kwargs = unpack_kwargs def copy_modified( self: CT, arg_types: Bogus[Sequence[Type]] = _dummy, arg_kinds: Bogus[list[ArgKind]] = _dummy, arg_names: Bogus[Sequence[str | None]] = _dummy, ret_type: Bogus[Type] = _dummy, fallback: Bogus[Instance] = _dummy, name: Bogus[str | None] = _dummy, definition: Bogus[SymbolNode] = _dummy, variables: Bogus[Sequence[TypeVarLikeType]] = _dummy, line: int = _dummy_int, column: int = _dummy_int, is_ellipsis_args: Bogus[bool] = _dummy, implicit: Bogus[bool] = _dummy, special_sig: Bogus[str | None] = _dummy, from_type_type: Bogus[bool] = _dummy, bound_args: Bogus[list[Type | None]] = _dummy, def_extras: Bogus[dict[str, Any]] = _dummy, type_guard: Bogus[Type | None] = _dummy, type_is: Bogus[Type | None] = _dummy, from_concatenate: Bogus[bool] = _dummy, imprecise_arg_kinds: Bogus[bool] = _dummy, unpack_kwargs: Bogus[bool] = _dummy, ) -> CT: modified = CallableType( arg_types=arg_types if arg_types is not _dummy else self.arg_types, arg_kinds=arg_kinds if arg_kinds is not _dummy else self.arg_kinds, arg_names=arg_names if arg_names is not _dummy else self.arg_names, ret_type=ret_type if ret_type is not _dummy else self.ret_type, fallback=fallback if fallback is not _dummy else self.fallback, name=name if name is not _dummy else self.name, definition=definition if definition is not _dummy else self.definition, variables=variables if variables is not _dummy else self.variables, line=line if line != _dummy_int else self.line, column=column if column != _dummy_int else self.column, is_ellipsis_args=( is_ellipsis_args if is_ellipsis_args is not _dummy else self.is_ellipsis_args ), implicit=implicit if implicit is not _dummy else self.implicit, special_sig=special_sig if special_sig is not _dummy else self.special_sig, from_type_type=from_type_type if from_type_type is not _dummy else self.from_type_type, bound_args=bound_args if bound_args is not _dummy else self.bound_args, def_extras=def_extras if def_extras is not _dummy else dict(self.def_extras), type_guard=type_guard if type_guard is not _dummy else self.type_guard, type_is=type_is if type_is is not _dummy else self.type_is, from_concatenate=( from_concatenate if from_concatenate is not _dummy else self.from_concatenate ), imprecise_arg_kinds=( imprecise_arg_kinds if imprecise_arg_kinds is not _dummy else self.imprecise_arg_kinds ), unpack_kwargs=unpack_kwargs if unpack_kwargs is not _dummy else self.unpack_kwargs, ) # Optimization: Only NewTypes are supported as subtypes since # the class is effectively final, so we can use a cast safely. 
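        # Hedged usage sketch (assuming a CallableType `c` built elsewhere):
        #     c2 = c.copy_modified(ret_type=NoneType())
        # keeps c's argument types/kinds/names and all flags, changing only the
        # return type; the cast below then restores the precise subtype CT.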
        return cast(CT, modified)

    def var_arg(self) -> FormalArgument | None:
        """The formal argument for *args."""
        for position, (type, kind) in enumerate(zip(self.arg_types, self.arg_kinds)):
            if kind == ARG_STAR:
                return FormalArgument(None, position, type, False)
        return None

    def kw_arg(self) -> FormalArgument | None:
        """The formal argument for **kwargs."""
        for position, (type, kind) in enumerate(zip(self.arg_types, self.arg_kinds)):
            if kind == ARG_STAR2:
                return FormalArgument(None, position, type, False)
        return None

    @property
    def is_var_arg(self) -> bool:
        """Does this callable have a *args argument?"""
        return ARG_STAR in self.arg_kinds

    @property
    def is_kw_arg(self) -> bool:
        """Does this callable have a **kwargs argument?"""
        return ARG_STAR2 in self.arg_kinds

    def is_type_obj(self) -> bool:
        return self.fallback.type.is_metaclass() and not isinstance(
            get_proper_type(self.ret_type), UninhabitedType
        )

    def type_object(self) -> mypy.nodes.TypeInfo:
        assert self.is_type_obj()
        ret = get_proper_type(self.ret_type)
        if isinstance(ret, TypeVarType):
            ret = get_proper_type(ret.upper_bound)
        if isinstance(ret, TupleType):
            ret = ret.partial_fallback
        if isinstance(ret, TypedDictType):
            ret = ret.fallback
        assert isinstance(ret, Instance)
        return ret.type

    def accept(self, visitor: TypeVisitor[T]) -> T:
        return visitor.visit_callable_type(self)

    def with_name(self, name: str) -> CallableType:
        """Return a copy of this type with the specified name."""
        return self.copy_modified(ret_type=self.ret_type, name=name)

    def get_name(self) -> str | None:
        return self.name

    def max_possible_positional_args(self) -> int:
        """Returns maximum number of positional arguments this method could possibly accept.

        This takes into account *args and **kwargs but excludes keyword-only args."""
        if self.is_var_arg or self.is_kw_arg:
            return sys.maxsize
        return sum(kind.is_positional() for kind in self.arg_kinds)

    def formal_arguments(self, include_star_args: bool = False) -> list[FormalArgument]:
        """Return a list of the formal arguments of this callable, ignoring *args and **kwargs.

        To handle *args and **kwargs, use the 'var_arg' and 'kw_arg' methods, if they
        do not return None.

        If you really want to include star args in the returned output, set the
        'include_star_args' parameter to 'True'."""
        args = []
        done_with_positional = False
        for i in range(len(self.arg_types)):
            kind = self.arg_kinds[i]
            if kind.is_named() or kind.is_star():
                done_with_positional = True
            if not include_star_args and kind.is_star():
                continue

            required = kind.is_required()
            pos = None if done_with_positional else i
            arg = FormalArgument(self.arg_names[i], pos, self.arg_types[i], required)
            args.append(arg)
        return args

    def argument_by_name(self, name: str | None) -> FormalArgument | None:
        if name is None:
            return None
        seen_star = False
        for i, (arg_name, kind, typ) in enumerate(
            zip(self.arg_names, self.arg_kinds, self.arg_types)
        ):
            # No more positional arguments after these.
if kind.is_named() or kind.is_star(): seen_star = True if kind.is_star(): continue if arg_name == name: position = None if seen_star else i return FormalArgument(name, position, typ, kind.is_required()) return self.try_synthesizing_arg_from_kwarg(name) def argument_by_position(self, position: int | None) -> FormalArgument | None: if position is None: return None if position >= len(self.arg_names): return self.try_synthesizing_arg_from_vararg(position) name, kind, typ = ( self.arg_names[position], self.arg_kinds[position], self.arg_types[position], ) if kind.is_positional(): return FormalArgument(name, position, typ, kind == ARG_POS) else: return self.try_synthesizing_arg_from_vararg(position) def try_synthesizing_arg_from_kwarg(self, name: str | None) -> FormalArgument | None: kw_arg = self.kw_arg() if kw_arg is not None: return FormalArgument(name, None, kw_arg.typ, False) else: return None def try_synthesizing_arg_from_vararg(self, position: int | None) -> FormalArgument | None: var_arg = self.var_arg() if var_arg is not None: return FormalArgument(None, position, var_arg.typ, False) else: return None @property def items(self) -> list[CallableType]: return [self] def is_generic(self) -> bool: return bool(self.variables) def type_var_ids(self) -> list[TypeVarId]: a: list[TypeVarId] = [] for tv in self.variables: a.append(tv.id) return a def param_spec(self) -> ParamSpecType | None: """Return ParamSpec if callable can be called with one. A Callable accepting ParamSpec P args (*args, **kwargs) must have the two final parameters like this: *args: P.args, **kwargs: P.kwargs. """ if len(self.arg_types) < 2: return None if self.arg_kinds[-2] != ARG_STAR or self.arg_kinds[-1] != ARG_STAR2: return None arg_type = self.arg_types[-2] if not isinstance(arg_type, ParamSpecType): return None # Prepend prefix for def f(prefix..., *args: P.args, **kwargs: P.kwargs) -> ... # TODO: confirm that all arg kinds are positional prefix = Parameters(self.arg_types[:-2], self.arg_kinds[:-2], self.arg_names[:-2]) return arg_type.copy_modified(flavor=ParamSpecFlavor.BARE, prefix=prefix) def normalize_trivial_unpack(self) -> None: # Normalize trivial unpack in var args as *args: *tuple[X, ...] -> *args: X in place. if self.is_var_arg: star_index = self.arg_kinds.index(ARG_STAR) star_type = self.arg_types[star_index] if isinstance(star_type, UnpackType): p_type = get_proper_type(star_type.type) if isinstance(p_type, Instance): assert p_type.type.fullname == "builtins.tuple" self.arg_types[star_index] = p_type.args[0] def with_unpacked_kwargs(self) -> NormalizedCallableType: if not self.unpack_kwargs: return cast(NormalizedCallableType, self) last_type = get_proper_type(self.arg_types[-1]) assert isinstance(last_type, TypedDictType) extra_kinds = [ ArgKind.ARG_NAMED if name in last_type.required_keys else ArgKind.ARG_NAMED_OPT for name in last_type.items ] new_arg_kinds = self.arg_kinds[:-1] + extra_kinds new_arg_names = self.arg_names[:-1] + list(last_type.items) new_arg_types = self.arg_types[:-1] + list(last_type.items.values()) return NormalizedCallableType( self.copy_modified( arg_kinds=new_arg_kinds, arg_names=new_arg_names, arg_types=new_arg_types, unpack_kwargs=False, ) ) def with_normalized_var_args(self) -> Self: var_arg = self.var_arg() if not var_arg or not isinstance(var_arg.typ, UnpackType): return self unpacked = get_proper_type(var_arg.typ.type) if not isinstance(unpacked, TupleType): # Note that we don't normalize *args: *tuple[X, ...] 
-> *args: X,
            # this should be done once in semanal_typeargs.py for user-defined types,
            # and we ourselves rarely construct such a type.
            return self
        unpack_index = find_unpack_in_list(unpacked.items)
        if unpack_index == 0 and len(unpacked.items) > 1:
            # Already normalized.
            return self
        # Boilerplate:
        var_arg_index = self.arg_kinds.index(ARG_STAR)
        types_prefix = self.arg_types[:var_arg_index]
        kinds_prefix = self.arg_kinds[:var_arg_index]
        names_prefix = self.arg_names[:var_arg_index]
        types_suffix = self.arg_types[var_arg_index + 1 :]
        kinds_suffix = self.arg_kinds[var_arg_index + 1 :]
        names_suffix = self.arg_names[var_arg_index + 1 :]
        no_name: str | None = None  # to silence mypy
        # Now we have something non-trivial to do.
        if unpack_index is None:
            # Plain *Tuple[X, Y, Z] -> replace with ARG_POS completely
            types_middle = unpacked.items
            kinds_middle = [ARG_POS] * len(unpacked.items)
            names_middle = [no_name] * len(unpacked.items)
        else:
            # *Tuple[X, *Ts, Y, Z] or *Tuple[X, *tuple[T, ...], X, Z], here
            # we replace the prefix by ARG_POS (this is how some places expect
            # Callables to be represented)
            nested_unpack = unpacked.items[unpack_index]
            assert isinstance(nested_unpack, UnpackType)
            nested_unpacked = get_proper_type(nested_unpack.type)
            if unpack_index == len(unpacked.items) - 1:
                # Normalize also single item tuples like
                #     *args: *Tuple[*tuple[X, ...]] -> *args: X
                #     *args: *Tuple[*Ts] -> *args: *Ts
                # This may not be strictly necessary, but these are very verbose.
                if isinstance(nested_unpacked, Instance):
                    assert nested_unpacked.type.fullname == "builtins.tuple"
                    new_unpack = nested_unpacked.args[0]
                else:
                    if not isinstance(nested_unpacked, TypeVarTupleType):
                        # We found a non-normalized tuple type; this means this method
                        # is called during semantic analysis (e.g. from get_proper_type()),
                        # so there is no point in normalizing callables at this stage.
                        return self
                    new_unpack = nested_unpack
            else:
                new_unpack = UnpackType(
                    unpacked.copy_modified(items=unpacked.items[unpack_index:])
                )
            types_middle = unpacked.items[:unpack_index] + [new_unpack]
            kinds_middle = [ARG_POS] * unpack_index + [ARG_STAR]
            names_middle = [no_name] * unpack_index + [self.arg_names[var_arg_index]]
        return self.copy_modified(
            arg_types=types_prefix + types_middle + types_suffix,
            arg_kinds=kinds_prefix + kinds_middle + kinds_suffix,
            arg_names=names_prefix + names_middle + names_suffix,
        )

    def __hash__(self) -> int:
        # self.is_type_obj() will fail if self.fallback.type is a FakeInfo
        if isinstance(self.fallback.type, FakeInfo):
            is_type_obj = 2
        else:
            is_type_obj = self.is_type_obj()
        return hash(
            (
                self.ret_type,
                is_type_obj,
                self.is_ellipsis_args,
                self.name,
                tuple(self.arg_types),
                tuple(self.arg_names),
                tuple(self.arg_kinds),
                self.fallback,
            )
        )

    def __eq__(self, other: object) -> bool:
        if isinstance(other, CallableType):
            return (
                self.ret_type == other.ret_type
                and self.arg_types == other.arg_types
                and self.arg_names == other.arg_names
                and self.arg_kinds == other.arg_kinds
                and self.name == other.name
                and self.is_type_obj() == other.is_type_obj()
                and self.is_ellipsis_args == other.is_ellipsis_args
                and self.fallback == other.fallback
            )
        else:
            return NotImplemented

    def serialize(self) -> JsonDict:
        # TODO: As an optimization, leave out everything related to
        # generic functions for non-generic functions.
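        # Hedged round-trip sketch: for a fully analyzed callable `c`,
        #     CallableType.deserialize(c.serialize()) == c
        # is the intent (the 'definition' field is deliberately not serialized).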
return { ".class": "CallableType", "arg_types": [t.serialize() for t in self.arg_types], "arg_kinds": [int(x.value) for x in self.arg_kinds], "arg_names": self.arg_names, "ret_type": self.ret_type.serialize(), "fallback": self.fallback.serialize(), "name": self.name, # We don't serialize the definition (only used for error messages). "variables": [v.serialize() for v in self.variables], "is_ellipsis_args": self.is_ellipsis_args, "implicit": self.implicit, "bound_args": [(None if t is None else t.serialize()) for t in self.bound_args], "def_extras": dict(self.def_extras), "type_guard": self.type_guard.serialize() if self.type_guard is not None else None, "type_is": (self.type_is.serialize() if self.type_is is not None else None), "from_concatenate": self.from_concatenate, "imprecise_arg_kinds": self.imprecise_arg_kinds, "unpack_kwargs": self.unpack_kwargs, } @classmethod def deserialize(cls, data: JsonDict) -> CallableType: assert data[".class"] == "CallableType" # TODO: Set definition to the containing SymbolNode? return CallableType( [deserialize_type(t) for t in data["arg_types"]], [ArgKind(x) for x in data["arg_kinds"]], data["arg_names"], deserialize_type(data["ret_type"]), Instance.deserialize(data["fallback"]), name=data["name"], variables=[cast(TypeVarLikeType, deserialize_type(v)) for v in data["variables"]], is_ellipsis_args=data["is_ellipsis_args"], implicit=data["implicit"], bound_args=[(None if t is None else deserialize_type(t)) for t in data["bound_args"]], def_extras=data["def_extras"], type_guard=( deserialize_type(data["type_guard"]) if data["type_guard"] is not None else None ), type_is=(deserialize_type(data["type_is"]) if data["type_is"] is not None else None), from_concatenate=data["from_concatenate"], imprecise_arg_kinds=data["imprecise_arg_kinds"], unpack_kwargs=data["unpack_kwargs"], ) # This is a little safety net to prevent reckless special-casing of callables # that can potentially break Unpack[...] with **kwargs. # TODO: use this in more places in checkexpr.py etc? NormalizedCallableType = NewType("NormalizedCallableType", CallableType) class Overloaded(FunctionLike): """Overloaded function type T1, ... Tn, where each Ti is CallableType. The variant to call is chosen based on static argument types. Overloaded function types can only be defined in stub files, and thus there is no explicit runtime dispatch implementation. """ __slots__ = ("_items",) _items: list[CallableType] # Must not be empty def __init__(self, items: list[CallableType]) -> None: super().__init__(items[0].line, items[0].column) self._items = items self.fallback = items[0].fallback @property def items(self) -> list[CallableType]: return self._items def name(self) -> str | None: return self.get_name() def is_type_obj(self) -> bool: # All the items must have the same type object status, so it's # sufficient to query only (any) one of them. return self._items[0].is_type_obj() def type_object(self) -> mypy.nodes.TypeInfo: # All the items must have the same type object, so it's sufficient to # query only (any) one of them. 
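        # Hedged sketch (hypothetical overloaded type `ov`): all items should
        # share one TypeInfo, so ov.items[0].type_object() and
        # ov.items[-1].type_object() are expected to be the same object.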
return self._items[0].type_object() def with_name(self, name: str) -> Overloaded: ni: list[CallableType] = [] for it in self._items: ni.append(it.with_name(name)) return Overloaded(ni) def get_name(self) -> str | None: return self._items[0].name def with_unpacked_kwargs(self) -> Overloaded: if any(i.unpack_kwargs for i in self.items): return Overloaded([i.with_unpacked_kwargs() for i in self.items]) return self def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_overloaded(self) def __hash__(self) -> int: return hash(tuple(self.items)) def __eq__(self, other: object) -> bool: if not isinstance(other, Overloaded): return NotImplemented return self.items == other.items def serialize(self) -> JsonDict: return {".class": "Overloaded", "items": [t.serialize() for t in self.items]} @classmethod def deserialize(cls, data: JsonDict) -> Overloaded: assert data[".class"] == "Overloaded" return Overloaded([CallableType.deserialize(t) for t in data["items"]]) class TupleType(ProperType): """The tuple type Tuple[T1, ..., Tn] (at least one type argument). Instance variables: items: Tuple item types partial_fallback: The (imprecise) underlying instance type that is used for non-tuple methods. This is generally builtins.tuple[Any, ...] for regular tuples, but it's different for named tuples and classes with a tuple base class. Use mypy.typeops.tuple_fallback to calculate the precise fallback type derived from item types. implicit: If True, derived from a tuple expression (t,....) instead of Tuple[t, ...] """ __slots__ = ("items", "partial_fallback", "implicit") items: list[Type] partial_fallback: Instance implicit: bool def __init__( self, items: list[Type], fallback: Instance, line: int = -1, column: int = -1, implicit: bool = False, ) -> None: super().__init__(line, column) self.partial_fallback = fallback self.items = items self.implicit = implicit def can_be_true_default(self) -> bool: if self.can_be_any_bool(): # Corner case: it is a `NamedTuple` with `__bool__` method defined. # It can be anything: both `True` and `False`. return True return self.length() > 0 def can_be_false_default(self) -> bool: if self.can_be_any_bool(): # Corner case: it is a `NamedTuple` with `__bool__` method defined. # It can be anything: both `True` and `False`. return True if self.length() == 0: return True if self.length() > 1: return False # Special case tuple[*Ts] may or may not be false. item = self.items[0] if not isinstance(item, UnpackType): return False if not isinstance(item.type, TypeVarTupleType): # Non-normalized tuple[int, ...] can be false. 
            return True
        return item.type.min_len == 0

    def can_be_any_bool(self) -> bool:
        return bool(
            self.partial_fallback.type
            and self.partial_fallback.type.fullname != "builtins.tuple"
            and self.partial_fallback.type.names.get("__bool__")
        )

    def length(self) -> int:
        return len(self.items)

    def accept(self, visitor: TypeVisitor[T]) -> T:
        return visitor.visit_tuple_type(self)

    def __hash__(self) -> int:
        return hash((tuple(self.items), self.partial_fallback))

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, TupleType):
            return NotImplemented
        return self.items == other.items and self.partial_fallback == other.partial_fallback

    def serialize(self) -> JsonDict:
        return {
            ".class": "TupleType",
            "items": [t.serialize() for t in self.items],
            "partial_fallback": self.partial_fallback.serialize(),
            "implicit": self.implicit,
        }

    @classmethod
    def deserialize(cls, data: JsonDict) -> TupleType:
        assert data[".class"] == "TupleType"
        return TupleType(
            [deserialize_type(t) for t in data["items"]],
            Instance.deserialize(data["partial_fallback"]),
            implicit=data["implicit"],
        )

    def copy_modified(
        self, *, fallback: Instance | None = None, items: list[Type] | None = None
    ) -> TupleType:
        if fallback is None:
            fallback = self.partial_fallback
        if items is None:
            items = self.items
        return TupleType(items, fallback, self.line, self.column)

    def slice(
        self, begin: int | None, end: int | None, stride: int | None, *, fallback: Instance | None
    ) -> TupleType | None:
        if fallback is None:
            fallback = self.partial_fallback

        if any(isinstance(t, UnpackType) for t in self.items):
            total = len(self.items)
            unpack_index = find_unpack_in_list(self.items)
            assert unpack_index is not None
            if begin is None and end is None:
                # We special-case this to support reversing variadic tuples.
                # General support for slicing is tricky, so we handle only simple cases.
                if stride == -1:
                    slice_items = self.items[::-1]
                elif stride is None or stride == 1:
                    slice_items = self.items
                else:
                    return None
            elif (begin is None or unpack_index >= begin >= 0) and (
                end is not None and unpack_index >= end >= 0
            ):
                # Start and end are in the prefix, everything works in this case.
                slice_items = self.items[begin:end:stride]
            elif (begin is not None and unpack_index - total < begin < 0) and (
                end is None or unpack_index - total < end < 0
            ):
                # Start and end are in the suffix, everything works in this case.
                slice_items = self.items[begin:end:stride]
            elif (begin is None or unpack_index >= begin >= 0) and (
                end is None or unpack_index - total < end < 0
            ):
                # Start in the prefix, end in the suffix, we can support only trivial strides.
                if stride is None or stride == 1:
                    slice_items = self.items[begin:end:stride]
                else:
                    return None
            elif (begin is not None and unpack_index - total < begin < 0) and (
                end is not None and unpack_index >= end >= 0
            ):
                # Start in the suffix, end in the prefix, we can support only trivial strides.
                if stride is None or stride == -1:
                    slice_items = self.items[begin:end:stride]
                else:
                    return None
            else:
                # TODO: there are some additional cases we can support for homogeneous
                # variadic items: we can "eat away" a finite number of items.
                return None
        else:
            slice_items = self.items[begin:end:stride]
        return TupleType(slice_items, fallback, self.line, self.column, self.implicit)


class TypedDictType(ProperType):
    """Type of TypedDict object {'k1': v1, ..., 'kn': vn}.

    A TypedDict object is a dictionary with specific string (literal) keys. Each
    key has a value with a distinct type that depends on the key. TypedDict objects
    are normal dict objects at runtime.
A TypedDictType can be either named or anonymous. If it's anonymous, its fallback will be typing_extensions._TypedDict (Instance). _TypedDict is a subclass of Mapping[str, object] and defines all non-mapping dict methods that TypedDict supports. Some dict methods are unsafe and not supported. _TypedDict isn't defined at runtime. If a TypedDict is named, its fallback will be an Instance of the named type (ex: "Point") whose TypeInfo has a typeddict_type that is anonymous. This is similar to how named tuples work. TODO: The fallback structure is perhaps overly complicated. """ __slots__ = ( "items", "required_keys", "readonly_keys", "fallback", "extra_items_from", "to_be_mutated", ) items: dict[str, Type] # item_name -> item_type required_keys: set[str] readonly_keys: set[str] fallback: Instance extra_items_from: list[ProperType] # only used during semantic analysis to_be_mutated: bool # only used in a plugin for `.update`, `|=`, etc def __init__( self, items: dict[str, Type], required_keys: set[str], readonly_keys: set[str], fallback: Instance, line: int = -1, column: int = -1, ) -> None: super().__init__(line, column) self.items = items self.required_keys = required_keys self.readonly_keys = readonly_keys self.fallback = fallback self.can_be_true = len(self.items) > 0 self.can_be_false = len(self.required_keys) == 0 self.extra_items_from = [] self.to_be_mutated = False def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_typeddict_type(self) def __hash__(self) -> int: return hash( ( frozenset(self.items.items()), self.fallback, frozenset(self.required_keys), frozenset(self.readonly_keys), ) ) def __eq__(self, other: object) -> bool: if not isinstance(other, TypedDictType): return NotImplemented if self is other: return True return ( frozenset(self.items.keys()) == frozenset(other.items.keys()) and all( left_item_type == right_item_type for (_, left_item_type, right_item_type) in self.zip(other) ) and self.fallback == other.fallback and self.required_keys == other.required_keys and self.readonly_keys == other.readonly_keys ) def serialize(self) -> JsonDict: return { ".class": "TypedDictType", "items": [[n, t.serialize()] for (n, t) in self.items.items()], "required_keys": sorted(self.required_keys), "readonly_keys": sorted(self.readonly_keys), "fallback": self.fallback.serialize(), } @classmethod def deserialize(cls, data: JsonDict) -> TypedDictType: assert data[".class"] == "TypedDictType" return TypedDictType( {n: deserialize_type(t) for (n, t) in data["items"]}, set(data["required_keys"]), set(data["readonly_keys"]), Instance.deserialize(data["fallback"]), ) @property def is_final(self) -> bool: return self.fallback.type.is_final def is_anonymous(self) -> bool: return self.fallback.type.fullname in TPDICT_FB_NAMES def as_anonymous(self) -> TypedDictType: if self.is_anonymous(): return self assert self.fallback.type.typeddict_type is not None return self.fallback.type.typeddict_type.as_anonymous() def copy_modified( self, *, fallback: Instance | None = None, item_types: list[Type] | None = None, item_names: list[str] | None = None, required_keys: set[str] | None = None, readonly_keys: set[str] | None = None, ) -> TypedDictType: if fallback is None: fallback = self.fallback if item_types is None: items = self.items else: items = dict(zip(self.items, item_types)) if required_keys is None: required_keys = self.required_keys if readonly_keys is None: readonly_keys = self.readonly_keys if item_names is not None: items = {k: v for (k, v) in items.items() if k in item_names} 
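            # Narrowing to item_names must also drop the removed keys from
            # required_keys, handled below.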
            required_keys &= set(item_names)
        return TypedDictType(items, required_keys, readonly_keys, fallback, self.line, self.column)

    def create_anonymous_fallback(self) -> Instance:
        anonymous = self.as_anonymous()
        return anonymous.fallback

    def names_are_wider_than(self, other: TypedDictType) -> bool:
        return len(other.items.keys() - self.items.keys()) == 0

    def zip(self, right: TypedDictType) -> Iterable[tuple[str, Type, Type]]:
        left = self
        for item_name, left_item_type in left.items.items():
            right_item_type = right.items.get(item_name)
            if right_item_type is not None:
                yield (item_name, left_item_type, right_item_type)

    def zipall(self, right: TypedDictType) -> Iterable[tuple[str, Type | None, Type | None]]:
        left = self
        for item_name, left_item_type in left.items.items():
            right_item_type = right.items.get(item_name)
            yield (item_name, left_item_type, right_item_type)
        for item_name, right_item_type in right.items.items():
            if item_name in left.items:
                continue
            yield (item_name, None, right_item_type)


class RawExpressionType(ProperType):
    """A synthetic type representing some arbitrary expression that does not cleanly
    translate into a type.

    This synthetic type is only used at the beginning stages of semantic analysis
    and should be completely removed during the process of mapping UnboundTypes to
    actual types: we either turn it into a LiteralType or an AnyType.

    For example, suppose `Foo[1]` is initially represented as the following:

        UnboundType(
            name='Foo',
            args=[
                RawExpressionType(value=1, base_type_name='builtins.int'),
            ],
        )

    As we perform semantic analysis, this type will transform into one of two
    possible forms.

    If 'Foo' was an alias for 'Literal' all along, this type is transformed into:

        LiteralType(value=1, fallback=int_instance_here)

    Alternatively, if 'Foo' is an unrelated class, we report an error and instead
    produce something like this:

        Instance(type=typeinfo_for_foo, args=[AnyType(TypeOfAny.from_error)])

    If the "note" field is not None, the provided note will be reported alongside
    the error at this point.

    Note: if "literal_value" is None, that means this object is representing some
    expression that cannot possibly be a parameter of Literal[...]. For example,
    "Foo[3j]" would be represented as:

        UnboundType(
            name='Foo',
            args=[
                RawExpressionType(value=None, base_type_name='builtins.complex'),
            ],
        )
    """

    __slots__ = ("literal_value", "base_type_name", "note")

    def __init__(
        self,
        literal_value: LiteralValue | None,
        base_type_name: str,
        line: int = -1,
        column: int = -1,
        note: str | None = None,
    ) -> None:
        super().__init__(line, column)
        self.literal_value = literal_value
        self.base_type_name = base_type_name
        self.note = note

    def simple_name(self) -> str:
        return self.base_type_name.replace("builtins.", "")

    def accept(self, visitor: TypeVisitor[T]) -> T:
        assert isinstance(visitor, SyntheticTypeVisitor)
        ret: T = visitor.visit_raw_expression_type(self)
        return ret

    def serialize(self) -> JsonDict:
        assert False, "Synthetic types don't serialize"

    def __hash__(self) -> int:
        return hash((self.literal_value, self.base_type_name))

    def __eq__(self, other: object) -> bool:
        if isinstance(other, RawExpressionType):
            return (
                self.base_type_name == other.base_type_name
                and self.literal_value == other.literal_value
            )
        else:
            return NotImplemented


class LiteralType(ProperType):
    """The type of a Literal instance. Literal[Value]

    A Literal always consists of:

    1. A native Python object corresponding to the contained inner value
    2. A fallback for this Literal. The fallback also corresponds to the
       parent type this Literal subtypes.
    For example, 'Literal[42]' is represented as
    'LiteralType(value=42, fallback=instance_of_int)'

    As another example, `Literal[Color.RED]` (where Color is an enum) is
    represented as `LiteralType(value="RED", fallback=instance_of_color)`.
    """

    __slots__ = ("value", "fallback", "_hash")

    def __init__(
        self, value: LiteralValue, fallback: Instance, line: int = -1, column: int = -1
    ) -> None:
        super().__init__(line, column)
        self.value = value
        self.fallback = fallback
        self._hash = -1  # Cached hash value

    def can_be_false_default(self) -> bool:
        return not self.value

    def can_be_true_default(self) -> bool:
        return bool(self.value)

    def accept(self, visitor: TypeVisitor[T]) -> T:
        return visitor.visit_literal_type(self)

    def __hash__(self) -> int:
        if self._hash == -1:
            self._hash = hash((self.value, self.fallback))
        return self._hash

    def __eq__(self, other: object) -> bool:
        if isinstance(other, LiteralType):
            return self.fallback == other.fallback and self.value == other.value
        else:
            return NotImplemented

    def is_enum_literal(self) -> bool:
        return self.fallback.type.is_enum

    def value_repr(self) -> str:
        """Returns the string representation of the underlying type.

        This function is almost equivalent to running `repr(self.value)`,
        except it includes some additional logic to correctly handle cases
        where the value is a string, byte string, a unicode string, or an enum.
        """
        raw = repr(self.value)
        fallback_name = self.fallback.type.fullname

        # If this is backed by an enum, show the qualified member name.
        if self.is_enum_literal():
            return f"{fallback_name}.{self.value}"

        if fallback_name == "builtins.bytes":
            # Note: 'builtins.bytes' only appears in Python 3, so we want to
            # explicitly prefix with a "b"
            return "b" + raw
        else:
            # 'builtins.str' could mean either depending on context, but either way
            # we don't prefix: it's the "native" string. And of course, if value is
            # some other type, we just return that string repr directly.
            return raw

    def serialize(self) -> JsonDict | str:
        return {
            ".class": "LiteralType",
            "value": self.value,
            "fallback": self.fallback.serialize(),
        }

    @classmethod
    def deserialize(cls, data: JsonDict) -> LiteralType:
        assert data[".class"] == "LiteralType"
        return LiteralType(value=data["value"], fallback=Instance.deserialize(data["fallback"]))

    def is_singleton_type(self) -> bool:
        return self.is_enum_literal() or isinstance(self.value, bool)


class UnionType(ProperType):
    """The union type Union[T1, ..., Tn] (at least one type argument)."""

    __slots__ = (
        "items",
        "is_evaluated",
        "uses_pep604_syntax",
        "original_str_expr",
        "original_str_fallback",
    )

    def __init__(
        self,
        items: Sequence[Type],
        line: int = -1,
        column: int = -1,
        *,
        is_evaluated: bool = True,
        uses_pep604_syntax: bool = False,
    ) -> None:
        super().__init__(line, column)
        # We must keep this false to avoid crashes during semantic analysis.
        # TODO: maybe switch this to True during type-checking pass?
        self.items = flatten_nested_unions(items, handle_type_alias_type=False)
        # is_evaluated should be set to false for type comments and string literals
        self.is_evaluated = is_evaluated
        # uses_pep604_syntax is True if Union uses OR syntax (X | Y)
        self.uses_pep604_syntax = uses_pep604_syntax
        # The meaning of these two is the same as for UnboundType. A UnionType can be
        # returned by the type parser from a string "A|B", and we need to be able to
        # fall back to plain string, when such a string appears inside a Literal[...].
self.original_str_expr: str | None = None self.original_str_fallback: str | None = None def can_be_true_default(self) -> bool: return any(item.can_be_true for item in self.items) def can_be_false_default(self) -> bool: return any(item.can_be_false for item in self.items) def __hash__(self) -> int: return hash(frozenset(self.items)) def __eq__(self, other: object) -> bool: if not isinstance(other, UnionType): return NotImplemented return frozenset(self.items) == frozenset(other.items) @overload @staticmethod def make_union( items: Sequence[ProperType], line: int = -1, column: int = -1 ) -> ProperType: ... @overload @staticmethod def make_union(items: Sequence[Type], line: int = -1, column: int = -1) -> Type: ... @staticmethod def make_union(items: Sequence[Type], line: int = -1, column: int = -1) -> Type: if len(items) > 1: return UnionType(items, line, column) elif len(items) == 1: return items[0] else: return UninhabitedType() def length(self) -> int: return len(self.items) def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_union_type(self) def relevant_items(self) -> list[Type]: """Removes NoneTypes from Unions when strict Optional checking is off.""" if state.strict_optional: return self.items else: return [i for i in self.items if not isinstance(get_proper_type(i), NoneType)] def serialize(self) -> JsonDict: return { ".class": "UnionType", "items": [t.serialize() for t in self.items], "uses_pep604_syntax": self.uses_pep604_syntax, } @classmethod def deserialize(cls, data: JsonDict) -> UnionType: assert data[".class"] == "UnionType" return UnionType( [deserialize_type(t) for t in data["items"]], uses_pep604_syntax=data["uses_pep604_syntax"], ) class PartialType(ProperType): """Type such as List[?] where type arguments are unknown, or partial None type. These are used for inferring types in multiphase initialization such as this: x = [] # x gets a partial type List[?], as item type is unknown x.append(1) # partial type gets replaced with normal type List[int] Or with None: x = None # x gets a partial type None if c: x = 1 # Infer actual type int for x """ __slots__ = ("type", "var", "value_type") # None for the 'None' partial type; otherwise a generic class type: mypy.nodes.TypeInfo | None var: mypy.nodes.Var # For partial defaultdict[K, V], the type V (K is unknown). If V is generic, # the type argument is Any and will be replaced later. value_type: Instance | None def __init__( self, type: mypy.nodes.TypeInfo | None, var: mypy.nodes.Var, value_type: Instance | None = None, ) -> None: super().__init__() self.type = type self.var = var self.value_type = value_type def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_partial_type(self) class EllipsisType(ProperType): """The type ... (ellipsis). This is not a real type but a syntactic AST construct, used in Callable[..., T], for example. A semantically analyzed type will never have ellipsis types. """ __slots__ = () def accept(self, visitor: TypeVisitor[T]) -> T: assert isinstance(visitor, SyntheticTypeVisitor) ret: T = visitor.visit_ellipsis_type(self) return ret def serialize(self) -> JsonDict: assert False, "Synthetic types don't serialize" class TypeType(ProperType): """For types like Type[User]. This annotates variables that are class objects, constrained by the type argument. See PEP 484 for more details. 
We may encounter expressions whose values are specific classes; those are represented as callables (possibly overloaded) corresponding to the class's constructor's signature and returning an instance of that class. The difference with Type[C] is that those callables always represent the exact class given as the return type; Type[C] represents any class that's a subclass of C, and C may also be a type variable or a union (or Any). Many questions around subtype relationships between Type[C1] and def(...) -> C2 are answered by looking at the subtype relationships between C1 and C2, since Type[] is considered covariant. There's an unsolved problem with constructor signatures (also unsolved in PEP 484): calling a variable whose type is Type[C] assumes the constructor signature for C, even though a subclass of C might completely change the constructor signature. For now we just assume that users of Type[C] are careful not to do that (in the future we might detect when they are violating that assumption). """ __slots__ = ("item",) # This can't be everything, but it can be a class reference, # a generic class instance, a union, Any, a type variable... item: ProperType def __init__( self, item: Bogus[Instance | AnyType | TypeVarType | TupleType | NoneType | CallableType], *, line: int = -1, column: int = -1, ) -> None: """To ensure Type[Union[A, B]] is always represented as Union[Type[A], Type[B]], item of type UnionType must be handled through make_normalized static method. """ super().__init__(line, column) self.item = item @staticmethod def make_normalized(item: Type, *, line: int = -1, column: int = -1) -> ProperType: item = get_proper_type(item) if isinstance(item, UnionType): return UnionType.make_union( [TypeType.make_normalized(union_item) for union_item in item.items], line=line, column=column, ) return TypeType(item, line=line, column=column) # type: ignore[arg-type] def accept(self, visitor: TypeVisitor[T]) -> T: return visitor.visit_type_type(self) def __hash__(self) -> int: return hash(self.item) def __eq__(self, other: object) -> bool: if not isinstance(other, TypeType): return NotImplemented return self.item == other.item def serialize(self) -> JsonDict: return {".class": "TypeType", "item": self.item.serialize()} @classmethod def deserialize(cls, data: JsonDict) -> Type: assert data[".class"] == "TypeType" return TypeType.make_normalized(deserialize_type(data["item"])) class PlaceholderType(ProperType): """Temporary, yet-unknown type during semantic analysis. This is needed when there's a reference to a type before the real symbol table entry of the target type is available (specifically, we use a temporary PlaceholderNode symbol node). Consider this example: class str(Sequence[str]): ... We use a PlaceholderType for the 'str' in 'Sequence[str]' since we can't create a TypeInfo for 'str' until all base classes have been resolved. We'll soon perform another analysis iteration which replaces the base class with a complete type without any placeholders. After semantic analysis, no placeholder types must exist. """ __slots__ = ("fullname", "args") def __init__(self, fullname: str | None, args: list[Type], line: int) -> None: super().__init__(line) self.fullname = fullname # Must be a valid full name of an actual node (or None). 
self.args = args def accept(self, visitor: TypeVisitor[T]) -> T: assert isinstance(visitor, SyntheticTypeVisitor) ret: T = visitor.visit_placeholder_type(self) return ret def __hash__(self) -> int: return hash((self.fullname, tuple(self.args))) def __eq__(self, other: object) -> bool: if not isinstance(other, PlaceholderType): return NotImplemented return self.fullname == other.fullname and self.args == other.args def serialize(self) -> str: # We should never get here since all placeholders should be replaced # during semantic analysis. assert False, f"Internal error: unresolved placeholder type {self.fullname}" @overload def get_proper_type(typ: None) -> None: ... @overload def get_proper_type(typ: Type) -> ProperType: ... def get_proper_type(typ: Type | None) -> ProperType | None: """Get the expansion of a type alias type. If the type is already a proper type, this is a no-op. Use this function wherever a decision is made on a call like e.g. 'if isinstance(typ, UnionType): ...', because 'typ' in this case may be an alias to union. Note: if after making the decision on the isinstance() call you pass on the original type (and not one of its components) it is recommended to *always* pass on the unexpanded alias. """ if typ is None: return None if isinstance(typ, TypeGuardedType): # type: ignore[misc] typ = typ.type_guard while isinstance(typ, TypeAliasType): typ = typ._expand_once() # TODO: store the name of original type alias on this type, so we can show it in errors. return cast(ProperType, typ) @overload def get_proper_types(types: list[Type] | tuple[Type, ...]) -> list[ProperType]: ... @overload def get_proper_types( types: list[Type | None] | tuple[Type | None, ...] ) -> list[ProperType | None]: ... def get_proper_types( types: list[Type] | list[Type | None] | tuple[Type | None, ...] ) -> list[ProperType] | list[ProperType | None]: if isinstance(types, list): typelist = types # Optimize for the common case so that we don't need to allocate anything if not any( isinstance(t, (TypeAliasType, TypeGuardedType)) for t in typelist # type: ignore[misc] ): return cast("list[ProperType]", typelist) return [get_proper_type(t) for t in typelist] else: return [get_proper_type(t) for t in types] # We split off the type visitor base classes to another module # to make it easier to gradually get modules working with mypyc. # Import them here, after the types are defined. # This is intended as a re-export also. from mypy.type_visitor import ( ALL_STRATEGY as ALL_STRATEGY, ANY_STRATEGY as ANY_STRATEGY, BoolTypeQuery as BoolTypeQuery, SyntheticTypeVisitor as SyntheticTypeVisitor, TypeQuery as TypeQuery, TypeTranslator as TypeTranslator, TypeVisitor as TypeVisitor, ) from mypy.typetraverser import TypeTraverserVisitor class TypeStrVisitor(SyntheticTypeVisitor[str]): """Visitor for pretty-printing types into strings. This is mostly for debugging/testing. Do not preserve original formatting. Notes: - Represent unbound types as Foo? or Foo?[...]. - Represent the NoneType type as None. """ def __init__(self, id_mapper: IdMapper | None = None, *, options: Options) -> None: self.id_mapper = id_mapper self.any_as_dots = False self.options = options def visit_unbound_type(self, t: UnboundType) -> str: s = t.name + "?" 
if t.args: s += f"[{self.list_str(t.args)}]" return s def visit_type_list(self, t: TypeList) -> str: return f"<TypeList {self.list_str(t.items)}>" def visit_callable_argument(self, t: CallableArgument) -> str: typ = t.typ.accept(self) if t.name is None: return f"{t.constructor}({typ})" else: return f"{t.constructor}({typ}, {t.name})" def visit_any(self, t: AnyType) -> str: if self.any_as_dots and t.type_of_any == TypeOfAny.special_form: return "..." return "Any" def visit_none_type(self, t: NoneType) -> str: return "None" def visit_uninhabited_type(self, t: UninhabitedType) -> str: return "Never" def visit_erased_type(self, t: ErasedType) -> str: return "<Erased>" def visit_deleted_type(self, t: DeletedType) -> str: if t.source is None: return "<Deleted>" else: return f"<Deleted '{t.source}'>" def visit_instance(self, t: Instance) -> str: if t.last_known_value and not t.args: # Instances with a literal fallback should never be generic. If they are, # something went wrong so we fall back to showing the full Instance repr. s = f"{t.last_known_value.accept(self)}?" else: s = t.type.fullname or t.type.name or "<???>" if t.args: if t.type.fullname == "builtins.tuple": assert len(t.args) == 1 s += f"[{self.list_str(t.args)}, ...]" else: s += f"[{self.list_str(t.args)}]" elif t.type.has_type_var_tuple_type and len(t.type.type_vars) == 1: s += "[()]" if self.id_mapper: s += f"<{self.id_mapper.id(t.type)}>" return s def visit_type_var(self, t: TypeVarType) -> str: if t.name is None: # Anonymous type variable type (only numeric id). s = f"`{t.id}" else: # Named type variable type. s = f"{t.name}`{t.id}" if self.id_mapper and t.upper_bound: s += f"(upper_bound={t.upper_bound.accept(self)})" if t.has_default(): s += f" = {t.default.accept(self)}" return s def visit_param_spec(self, t: ParamSpecType) -> str: # prefixes are displayed as Concatenate s = "" if t.prefix.arg_types: s += f"[{self.list_str(t.prefix.arg_types)}, **" if t.name is None: # Anonymous type variable type (only numeric id). s += f"`{t.id}" else: # Named type variable type. s += f"{t.name_with_suffix()}`{t.id}" if t.prefix.arg_types: s += "]" if t.has_default(): s += f" = {t.default.accept(self)}" return s def visit_parameters(self, t: Parameters) -> str: # This is copied from visit_callable -- is there a way to decrease duplication? if t.is_ellipsis_args: return "..." s = "" bare_asterisk = False for i in range(len(t.arg_types)): if s != "": s += ", " if t.arg_kinds[i].is_named() and not bare_asterisk: s += "*, " bare_asterisk = True if t.arg_kinds[i] == ARG_STAR: s += "*" if t.arg_kinds[i] == ARG_STAR2: s += "**" name = t.arg_names[i] if name: s += f"{name}: " r = t.arg_types[i].accept(self) s += r if t.arg_kinds[i].is_optional(): s += " =" return f"[{s}]" def visit_type_var_tuple(self, t: TypeVarTupleType) -> str: if t.name is None: # Anonymous type variable type (only numeric id). s = f"`{t.id}" else: # Named type variable type. 
s = f"{t.name}`{t.id}" if t.has_default(): s += f" = {t.default.accept(self)}" return s def visit_callable_type(self, t: CallableType) -> str: param_spec = t.param_spec() if param_spec is not None: num_skip = 2 else: num_skip = 0 s = "" asterisk = False for i in range(len(t.arg_types) - num_skip): if s != "": s += ", " if t.arg_kinds[i].is_named() and not asterisk: s += "*, " asterisk = True if t.arg_kinds[i] == ARG_STAR: s += "*" asterisk = True if t.arg_kinds[i] == ARG_STAR2: s += "**" name = t.arg_names[i] if name: s += name + ": " type_str = t.arg_types[i].accept(self) if t.arg_kinds[i] == ARG_STAR2 and t.unpack_kwargs: type_str = f"Unpack[{type_str}]" s += type_str if t.arg_kinds[i].is_optional(): s += " =" if param_spec is not None: n = param_spec.name if s: s += ", " s += f"*{n}.args, **{n}.kwargs" if param_spec.has_default(): s += f" = {param_spec.default.accept(self)}" s = f"({s})" if not isinstance(get_proper_type(t.ret_type), NoneType): if t.type_guard is not None: s += f" -> TypeGuard[{t.type_guard.accept(self)}]" elif t.type_is is not None: s += f" -> TypeIs[{t.type_is.accept(self)}]" else: s += f" -> {t.ret_type.accept(self)}" if t.variables: vs = [] for var in t.variables: if isinstance(var, TypeVarType): # We reimplement TypeVarType.__repr__ here in order to support id_mapper. if var.values: vals = f"({', '.join(val.accept(self) for val in var.values)})" vs.append(f"{var.name} in {vals}") elif not is_named_instance(var.upper_bound, "builtins.object"): vs.append( f"{var.name} <: {var.upper_bound.accept(self)}{f' = {var.default.accept(self)}' if var.has_default() else ''}" ) else: vs.append( f"{var.name}{f' = {var.default.accept(self)}' if var.has_default() else ''}" ) else: # For other TypeVarLikeTypes, use the name and default vs.append( f"{var.name}{f' = {var.default.accept(self)}' if var.has_default() else ''}" ) s = f"[{', '.join(vs)}] {s}" return f"def {s}" def visit_overloaded(self, t: Overloaded) -> str: a = [] for i in t.items: a.append(i.accept(self)) return f"Overload({', '.join(a)})" def visit_tuple_type(self, t: TupleType) -> str: s = self.list_str(t.items) or "()" tuple_name = "tuple" if self.options.use_lowercase_names() else "Tuple" if t.partial_fallback and t.partial_fallback.type: fallback_name = t.partial_fallback.type.fullname if fallback_name != "builtins.tuple": return f"{tuple_name}[{s}, fallback={t.partial_fallback.accept(self)}]" return f"{tuple_name}[{s}]" def visit_typeddict_type(self, t: TypedDictType) -> str: def item_str(name: str, typ: str) -> str: modifier = "" if name not in t.required_keys: modifier += "?" if name in t.readonly_keys: modifier += "=" return f"{name!r}{modifier}: {typ}" s = ( "{" + ", ".join(item_str(name, typ.accept(self)) for name, typ in t.items.items()) + "}" ) prefix = "" if t.fallback and t.fallback.type: if t.fallback.type.fullname not in TPDICT_FB_NAMES: prefix = repr(t.fallback.type.fullname) + ", " return f"TypedDict({prefix}{s})" def visit_raw_expression_type(self, t: RawExpressionType) -> str: return repr(t.literal_value) def visit_literal_type(self, t: LiteralType) -> str: return f"Literal[{t.value_repr()}]" def visit_union_type(self, t: UnionType) -> str: s = self.list_str(t.items) return f"Union[{s}]" def visit_partial_type(self, t: PartialType) -> str: if t.type is None: return "<partial None>" else: return "<partial {}[{}]>".format(t.type.name, ", ".join(["?"] * len(t.type.type_vars))) def visit_ellipsis_type(self, t: EllipsisType) -> str: return "..." 
def visit_type_type(self, t: TypeType) -> str: if self.options.use_lowercase_names(): type_name = "type" else: type_name = "Type" return f"{type_name}[{t.item.accept(self)}]" def visit_placeholder_type(self, t: PlaceholderType) -> str: return f"<placeholder {t.fullname}>" def visit_type_alias_type(self, t: TypeAliasType) -> str: if t.alias is not None: unrolled, recursed = t._partial_expansion() self.any_as_dots = recursed type_str = unrolled.accept(self) self.any_as_dots = False return type_str return "<alias (unfixed)>" def visit_unpack_type(self, t: UnpackType) -> str: return f"Unpack[{t.type.accept(self)}]" def list_str(self, a: Iterable[Type]) -> str: """Convert items of an array to strings (pretty-print types) and join the results with commas. """ res = [] for t in a: res.append(t.accept(self)) return ", ".join(res) class TrivialSyntheticTypeTranslator(TypeTranslator, SyntheticTypeVisitor[Type]): """A base class for type translators that need to be run during semantic analysis.""" def visit_placeholder_type(self, t: PlaceholderType) -> Type: return t def visit_callable_argument(self, t: CallableArgument) -> Type: return t def visit_ellipsis_type(self, t: EllipsisType) -> Type: return t def visit_raw_expression_type(self, t: RawExpressionType) -> Type: return t def visit_type_list(self, t: TypeList) -> Type: return t class UnrollAliasVisitor(TrivialSyntheticTypeTranslator): def __init__( self, initial_aliases: set[TypeAliasType], cache: dict[Type, Type] | None ) -> None: assert cache is not None super().__init__(cache) self.recursed = False self.initial_aliases = initial_aliases def visit_type_alias_type(self, t: TypeAliasType) -> Type: if t in self.initial_aliases: self.recursed = True return AnyType(TypeOfAny.special_form) # Create a new visitor on encountering a new type alias, so that an alias like # A = Tuple[B, B] # B = int # will not be detected as recursive on the second encounter of B. subvisitor = UnrollAliasVisitor(self.initial_aliases | {t}, self.cache) result = get_proper_type(t).accept(subvisitor) if subvisitor.recursed: self.recursed = True return result def is_named_instance(t: Type, fullnames: str | tuple[str, ...]) -> TypeGuard[Instance]: if not isinstance(fullnames, tuple): fullnames = (fullnames,) t = get_proper_type(t) return isinstance(t, Instance) and t.type.fullname in fullnames class LocationSetter(TypeTraverserVisitor): # TODO: Should we update locations of other Type subclasses? 
def __init__(self, line: int, column: int) -> None: self.line = line self.column = column def visit_instance(self, typ: Instance) -> None: typ.line = self.line typ.column = self.column super().visit_instance(typ) def visit_type_alias_type(self, typ: TypeAliasType) -> None: typ.line = self.line typ.column = self.column super().visit_type_alias_type(typ) class HasTypeVars(BoolTypeQuery): def __init__(self) -> None: super().__init__(ANY_STRATEGY) self.skip_alias_target = True def visit_type_var(self, t: TypeVarType) -> bool: return True def visit_type_var_tuple(self, t: TypeVarTupleType) -> bool: return True def visit_param_spec(self, t: ParamSpecType) -> bool: return True def has_type_vars(typ: Type) -> bool: """Check if a type contains any type variables (recursively).""" return typ.accept(HasTypeVars()) class HasRecursiveType(BoolTypeQuery): def __init__(self) -> None: super().__init__(ANY_STRATEGY) def visit_type_alias_type(self, t: TypeAliasType) -> bool: return t.is_recursive or self.query_types(t.args) # Use singleton since this is hot (note: call reset() before using) _has_recursive_type: Final = HasRecursiveType() def has_recursive_types(typ: Type) -> bool: """Check if a type contains any recursive aliases (recursively).""" _has_recursive_type.reset() return typ.accept(_has_recursive_type) def split_with_prefix_and_suffix( types: tuple[Type, ...], prefix: int, suffix: int ) -> tuple[tuple[Type, ...], tuple[Type, ...], tuple[Type, ...]]: if len(types) <= prefix + suffix: types = extend_args_for_prefix_and_suffix(types, prefix, suffix) if suffix: return types[:prefix], types[prefix:-suffix], types[-suffix:] else: return types[:prefix], types[prefix:], () def extend_args_for_prefix_and_suffix( types: tuple[Type, ...], prefix: int, suffix: int ) -> tuple[Type, ...]: """Extend list of types by eating out from variadic tuple to satisfy prefix and suffix.""" idx = None item = None for i, t in enumerate(types): if isinstance(t, UnpackType): p_type = get_proper_type(t.type) if isinstance(p_type, Instance) and p_type.type.fullname == "builtins.tuple": item = p_type.args[0] idx = i break if idx is None: return types assert item is not None if idx < prefix: start = (item,) * (prefix - idx) else: start = () if len(types) - idx - 1 < suffix: end = (item,) * (suffix - len(types) + idx + 1) else: end = () return types[:idx] + start + (types[idx],) + end + types[idx + 1 :] def flatten_nested_unions( types: Sequence[Type], *, handle_type_alias_type: bool = True, handle_recursive: bool = True ) -> list[Type]: """Flatten nested unions in a type list.""" if not isinstance(types, list): typelist = list(types) else: typelist = cast("list[Type]", types) # Fast path: most of the time there is nothing to flatten if not any(isinstance(t, (TypeAliasType, UnionType)) for t in typelist): # type: ignore[misc] return typelist flat_items: list[Type] = [] for t in typelist: if handle_type_alias_type: if not handle_recursive and isinstance(t, TypeAliasType) and t.is_recursive: tp: Type = t else: tp = get_proper_type(t) else: tp = t if isinstance(tp, ProperType) and isinstance(tp, UnionType): flat_items.extend( flatten_nested_unions(tp.items, handle_type_alias_type=handle_type_alias_type) ) else: # Must preserve original aliases when possible. flat_items.append(t) return flat_items def find_unpack_in_list(items: Sequence[Type]) -> int | None: unpack_index: int | None = None for i, item in enumerate(items): if isinstance(item, UnpackType): # We cannot fail here, so we must check this in an earlier # semanal phase. 
# Funky code here avoids mypyc narrowing the type of unpack_index. old_index = unpack_index assert old_index is None # Don't return so that we can also sanity check there is only one. unpack_index = i return unpack_index def flatten_nested_tuples(types: Sequence[Type]) -> list[Type]: """Recursively flatten TupleTypes nested with Unpack. For example this will transform Tuple[A, Unpack[Tuple[B, Unpack[Tuple[C, D]]]]] into Tuple[A, B, C, D] """ res = [] for typ in types: if not isinstance(typ, UnpackType): res.append(typ) continue p_type = get_proper_type(typ.type) if not isinstance(p_type, TupleType): res.append(typ) continue res.extend(flatten_nested_tuples(p_type.items)) return res def is_literal_type(typ: ProperType, fallback_fullname: str, value: LiteralValue) -> bool: """Check if this type is a LiteralType with the given fallback type and value.""" if isinstance(typ, Instance) and typ.last_known_value: typ = typ.last_known_value return ( isinstance(typ, LiteralType) and typ.fallback.type.fullname == fallback_fullname and typ.value == value ) names: Final = globals().copy() names.pop("NOT_READY", None) deserialize_map: Final = { key: obj.deserialize for key, obj in names.items() if isinstance(obj, type) and issubclass(obj, Type) and obj is not Type } def callable_with_ellipsis(any_type: AnyType, ret_type: Type, fallback: Instance) -> CallableType: """Construct type Callable[..., ret_type].""" return CallableType( [any_type, any_type], [ARG_STAR, ARG_STAR2], [None, None], ret_type=ret_type, fallback=fallback, is_ellipsis_args=True, ) def remove_dups(types: list[T]) -> list[T]: if len(types) <= 1: return types # Get unique elements in order of appearance all_types: set[T] = set() new_types: list[T] = [] for t in types: if t not in all_types: new_types.append(t) all_types.add(t) return new_types def type_vars_as_args(type_vars: Sequence[TypeVarLikeType]) -> tuple[Type, ...]: """Represent type variables as they would appear in a type argument list.""" args: list[Type] = [] for tv in type_vars: if isinstance(tv, TypeVarTupleType): args.append(UnpackType(tv)) else: args.append(tv) return tuple(args) # This cyclic import is unfortunate, but to avoid it we would need to move away all uses # of get_proper_type() from types.py. Majority of them have been removed, but few remaining # are quite tricky to get rid of, but ultimately we want to do it at some point. from mypy.expandtype import ExpandTypeVisitor class InstantiateAliasVisitor(ExpandTypeVisitor): def visit_union_type(self, t: UnionType) -> Type: # Unlike regular expand_type(), we don't do any simplification for unions, # not even removing strict duplicates. There are three reasons for this: # * get_proper_type() is a very hot function, even slightest slow down will # cause a perf regression # * We want to preserve this historical behaviour, to avoid possible # regressions # * Simplifying unions may (indirectly) call get_proper_type(), causing # infinite recursion. return TypeTranslator.visit_union_type(self, t)
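
if __name__ == "__main__":
    # A minimal usage sketch, not part of mypy's API: `describe` below is a
    # hypothetical helper. It shows the rule from the get_proper_type()
    # docstring -- expand a possible TypeAliasType before isinstance() checks.
    def describe(typ: Type) -> str:
        proper = get_proper_type(typ)  # no-op if `typ` is already a ProperType
        if isinstance(proper, UnionType):
            return f"union of {len(proper.items)} items"
        return type(proper).__name__

    example = UnionType([NoneType(), AnyType(TypeOfAny.special_form)])
    print(describe(example))  # -> "union of 2 items"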
algorandfoundation/puya
src/puyapy/_vendor/mypy/types.py
Python
NOASSERTION
137,495
""" This module is for (more basic) type operations that should not depend on is_subtype(), meet_types(), join_types() etc. We don't want to keep them in mypy/types.py for two reasons: * Reduce the size of that module. * Reduce use of get_proper_type() in types.py to avoid cyclic imports expand_type <-> types, if we move get_proper_type() to the former. """ from __future__ import annotations from typing import Callable, Iterable, cast from mypy.nodes import ARG_STAR, ARG_STAR2, FuncItem, TypeAlias from mypy.types import ( AnyType, CallableType, Instance, NoneType, Overloaded, ParamSpecType, ProperType, TupleType, Type, TypeAliasType, TypeType, TypeVarType, UnionType, UnpackType, flatten_nested_unions, get_proper_type, get_proper_types, ) def flatten_types(types: Iterable[Type]) -> Iterable[Type]: for t in types: tp = get_proper_type(t) if isinstance(tp, UnionType): yield from flatten_types(tp.items) else: yield t def strip_type(typ: Type) -> Type: """Make a copy of type without 'debugging info' (function name).""" orig_typ = typ typ = get_proper_type(typ) if isinstance(typ, CallableType): return typ.copy_modified(name=None) elif isinstance(typ, Overloaded): return Overloaded([cast(CallableType, strip_type(item)) for item in typ.items]) else: return orig_typ def is_invalid_recursive_alias(seen_nodes: set[TypeAlias], target: Type) -> bool: """Flag aliases like A = Union[int, A], T = tuple[int, *T] (and similar mutual aliases). Such aliases don't make much sense, and cause problems in later phases. """ if isinstance(target, TypeAliasType): if target.alias in seen_nodes: return True assert target.alias, f"Unfixed type alias {target.type_ref}" return is_invalid_recursive_alias(seen_nodes | {target.alias}, get_proper_type(target)) assert isinstance(target, ProperType) if not isinstance(target, (UnionType, TupleType)): return False if isinstance(target, UnionType): return any(is_invalid_recursive_alias(seen_nodes, item) for item in target.items) for item in target.items: if isinstance(item, UnpackType): if is_invalid_recursive_alias(seen_nodes, item.type): return True return False def is_bad_type_type_item(item: Type) -> bool: """Prohibit types like Type[Type[...]]. Such types are explicitly prohibited by PEP 484. Also, they cause problems with recursive types like T = Type[T], because internal representation of TypeType item is normalized (i.e. always a proper type). 
""" item = get_proper_type(item) if isinstance(item, TypeType): return True if isinstance(item, UnionType): return any( isinstance(get_proper_type(i), TypeType) for i in flatten_nested_unions(item.items) ) return False def is_union_with_any(tp: Type) -> bool: """Is this a union with Any or a plain Any type?""" tp = get_proper_type(tp) if isinstance(tp, AnyType): return True if not isinstance(tp, UnionType): return False return any(is_union_with_any(t) for t in get_proper_types(tp.items)) def is_generic_instance(tp: Type) -> bool: tp = get_proper_type(tp) return isinstance(tp, Instance) and bool(tp.args) def is_overlapping_none(t: Type) -> bool: t = get_proper_type(t) return isinstance(t, NoneType) or ( isinstance(t, UnionType) and any(isinstance(get_proper_type(e), NoneType) for e in t.items) ) def remove_optional(typ: Type) -> Type: typ = get_proper_type(typ) if isinstance(typ, UnionType): return UnionType.make_union( [t for t in typ.items if not isinstance(get_proper_type(t), NoneType)] ) else: return typ def is_self_type_like(typ: Type, *, is_classmethod: bool) -> bool: """Does this look like a self-type annotation?""" typ = get_proper_type(typ) if not is_classmethod: return isinstance(typ, TypeVarType) if not isinstance(typ, TypeType): return False return isinstance(typ.item, TypeVarType) def store_argument_type( defn: FuncItem, i: int, typ: CallableType, named_type: Callable[[str, list[Type]], Instance] ) -> None: arg_type = typ.arg_types[i] if typ.arg_kinds[i] == ARG_STAR: if isinstance(arg_type, ParamSpecType): pass elif isinstance(arg_type, UnpackType): unpacked_type = get_proper_type(arg_type.type) if isinstance(unpacked_type, TupleType): # Instead of using Tuple[Unpack[Tuple[...]]], just use Tuple[...] arg_type = unpacked_type elif ( isinstance(unpacked_type, Instance) and unpacked_type.type.fullname == "builtins.tuple" ): arg_type = unpacked_type else: # TODO: verify that we can only have a TypeVarTuple here. arg_type = TupleType( [arg_type], fallback=named_type("builtins.tuple", [named_type("builtins.object", [])]), ) else: # builtins.tuple[T] is typing.Tuple[T, ...] arg_type = named_type("builtins.tuple", [arg_type]) elif typ.arg_kinds[i] == ARG_STAR2: if not isinstance(arg_type, ParamSpecType) and not typ.unpack_kwargs: arg_type = named_type("builtins.dict", [named_type("builtins.str", []), arg_type]) defn.arguments[i].variable.type = arg_type
algorandfoundation/puya
src/puyapy/_vendor/mypy/types_utils.py
Python
NOASSERTION
5,671
""" A shared state for all TypeInfos that holds global cache and dependency information, and potentially other mutable TypeInfo state. This module contains mutable global state. """ from __future__ import annotations from typing import Dict, Final, Set, Tuple from typing_extensions import TypeAlias as _TypeAlias from mypy.nodes import VARIANCE_NOT_READY, TypeInfo from mypy.server.trigger import make_trigger from mypy.types import Instance, Type, TypeVarId, TypeVarType, get_proper_type MAX_NEGATIVE_CACHE_TYPES: Final = 1000 MAX_NEGATIVE_CACHE_ENTRIES: Final = 10000 # Represents that the 'left' instance is a subtype of the 'right' instance SubtypeRelationship: _TypeAlias = Tuple[Instance, Instance] # A tuple encoding the specific conditions under which we performed the subtype check. # (e.g. did we want a proper subtype? A regular subtype while ignoring variance?) SubtypeKind: _TypeAlias = Tuple[bool, ...] # A cache that keeps track of whether the given TypeInfo is a part of a particular # subtype relationship SubtypeCache: _TypeAlias = Dict[TypeInfo, Dict[SubtypeKind, Set[SubtypeRelationship]]] class TypeState: """This class provides subtype caching to improve performance of subtype checks. It also holds protocol fine grained dependencies. Note: to avoid leaking global state, 'reset_all_subtype_caches()' should be called after a build has finished and after a daemon shutdown. This subtype cache only exists for performance reasons, resetting subtype caches for a class has no semantic effect. The protocol dependencies however are only stored here, and shouldn't be deleted unless not needed any more (e.g. during daemon shutdown). """ # '_subtype_caches' keeps track of (subtype, supertype) pairs where supertypes are # instances of the given TypeInfo. The cache also keeps track of whether the check # was done in strict optional mode and of the specific *kind* of subtyping relationship, # which we represent as an arbitrary hashable tuple. # We need the caches, since subtype checks for structural types are very slow. _subtype_caches: Final[SubtypeCache] # Same as above but for negative subtyping results. _negative_subtype_caches: Final[SubtypeCache] # This contains protocol dependencies generated after running a full build, # or after an update. These dependencies are special because: # * They are a global property of the program; i.e. some dependencies for imported # classes can be generated in the importing modules. # * Because of the above, they are serialized separately, after a full run, # or a full update. # `proto_deps` can be None if after deserialization it turns out that they are # inconsistent with the other cache files (or an error occurred during deserialization). # A blocking error will be generated in this case, since we can't proceed safely. # For the description of kinds of protocol dependencies and corresponding examples, # see _snapshot_protocol_deps. proto_deps: dict[str, set[str]] | None # Protocols (full names) a given class attempted to implement. # Used to calculate fine grained protocol dependencies and optimize protocol # subtype cache invalidation in fine grained mode. For example, if we pass a value # of type a.A to a function expecting something compatible with protocol p.P, # we'd have 'a.A' -> {'p.P', ...} in the map. This map is flushed after every incremental # update. _attempted_protocols: Final[dict[str, set[str]]] # We also snapshot protocol members of the above protocols. 
For example, if we pass # a value of type a.A to a function expecting something compatible with Iterable, we'd have # 'a.A' -> {'__iter__', ...} in the map. This map is also flushed after every incremental # update. This map is needed to only generate dependencies like <a.A.__iter__> -> <a.A> # instead of a wildcard to avoid unnecessarily invalidating classes. _checked_against_members: Final[dict[str, set[str]]] # TypeInfos that appeared as a left type (subtype) in a subtype check since latest # dependency snapshot update. This is an optimisation for fine grained mode; during a full # run we only take a dependency snapshot at the very end, so this set will contain all # subtype-checked TypeInfos. After a fine grained update however, we can gather only new # dependencies generated from (typically) few TypeInfos that were subtype-checked # (i.e. appeared as r.h.s. in an assignment or an argument in a function call in # a re-checked target) during the update. _rechecked_types: Final[set[TypeInfo]] # The two attributes below are assumption stacks for subtyping relationships between # recursive type aliases. Normally, one would pass type assumptions as an additional # arguments to is_subtype(), but this would mean updating dozens of related functions # threading this through all callsites (see also comment for TypeInfo.assuming). _assuming: Final[list[tuple[Type, Type]]] _assuming_proper: Final[list[tuple[Type, Type]]] # Ditto for inference of generic constraints against recursive type aliases. inferring: Final[list[tuple[Type, Type]]] # Whether to use joins or unions when solving constraints, see checkexpr.py for details. infer_unions: bool # Whether to use new type inference algorithm that can infer polymorphic types. # This is temporary and will be removed soon when new algorithm is more polished. infer_polymorphic: bool # N.B: We do all of the accesses to these properties through # TypeState, instead of making these classmethods and accessing # via the cls parameter, since mypyc can optimize accesses to # Final attributes of a directly referenced type. 
def __init__(self) -> None: self._subtype_caches = {} self._negative_subtype_caches = {} self.proto_deps = {} self._attempted_protocols = {} self._checked_against_members = {} self._rechecked_types = set() self._assuming = [] self._assuming_proper = [] self.inferring = [] self.infer_unions = False self.infer_polymorphic = False def is_assumed_subtype(self, left: Type, right: Type) -> bool: for l, r in reversed(self._assuming): if get_proper_type(l) == get_proper_type(left) and get_proper_type( r ) == get_proper_type(right): return True return False def is_assumed_proper_subtype(self, left: Type, right: Type) -> bool: for l, r in reversed(self._assuming_proper): if get_proper_type(l) == get_proper_type(left) and get_proper_type( r ) == get_proper_type(right): return True return False def get_assumptions(self, is_proper: bool) -> list[tuple[Type, Type]]: if is_proper: return self._assuming_proper return self._assuming def reset_all_subtype_caches(self) -> None: """Completely reset all known subtype caches.""" self._subtype_caches.clear() self._negative_subtype_caches.clear() def reset_subtype_caches_for(self, info: TypeInfo) -> None: """Reset subtype caches (if any) for a given supertype TypeInfo.""" if info in self._subtype_caches: self._subtype_caches[info].clear() if info in self._negative_subtype_caches: self._negative_subtype_caches[info].clear() def reset_all_subtype_caches_for(self, info: TypeInfo) -> None: """Reset subtype caches (if any) for a given supertype TypeInfo and its MRO.""" for item in info.mro: self.reset_subtype_caches_for(item) def is_cached_subtype_check(self, kind: SubtypeKind, left: Instance, right: Instance) -> bool: if left.last_known_value is not None or right.last_known_value is not None: # If there is a literal last known value, give up. There # will be an unbounded number of potential types to cache, # making caching less effective. return False info = right.type cache = self._subtype_caches.get(info) if cache is None: return False subcache = cache.get(kind) if subcache is None: return False return (left, right) in subcache def is_cached_negative_subtype_check( self, kind: SubtypeKind, left: Instance, right: Instance ) -> bool: if left.last_known_value is not None or right.last_known_value is not None: # If there is a literal last known value, give up. There # will be an unbounded number of potential types to cache, # making caching less effective. return False info = right.type cache = self._negative_subtype_caches.get(info) if cache is None: return False subcache = cache.get(kind) if subcache is None: return False return (left, right) in subcache def record_subtype_cache_entry( self, kind: SubtypeKind, left: Instance, right: Instance ) -> None: if left.last_known_value is not None or right.last_known_value is not None: # These are unlikely to match, due to the large space of # possible values. Avoid uselessly increasing cache sizes. return if any( (isinstance(tv, TypeVarType) and tv.variance == VARIANCE_NOT_READY) for tv in right.type.defn.type_vars ): # Variance indeterminate -- don't know the result return cache = self._subtype_caches.setdefault(right.type, {}) cache.setdefault(kind, set()).add((left, right)) def record_negative_subtype_cache_entry( self, kind: SubtypeKind, left: Instance, right: Instance ) -> None: if left.last_known_value is not None or right.last_known_value is not None: # These are unlikely to match, due to the large space of # possible values. Avoid uselessly increasing cache sizes. 
return if len(self._negative_subtype_caches) > MAX_NEGATIVE_CACHE_TYPES: self._negative_subtype_caches.clear() cache = self._negative_subtype_caches.setdefault(right.type, {}) subcache = cache.setdefault(kind, set()) if len(subcache) > MAX_NEGATIVE_CACHE_ENTRIES: subcache.clear() cache.setdefault(kind, set()).add((left, right)) def reset_protocol_deps(self) -> None: """Reset dependencies after a full run or before a daemon shutdown.""" self.proto_deps = {} self._attempted_protocols.clear() self._checked_against_members.clear() self._rechecked_types.clear() def record_protocol_subtype_check(self, left_type: TypeInfo, right_type: TypeInfo) -> None: assert right_type.is_protocol self._rechecked_types.add(left_type) self._attempted_protocols.setdefault(left_type.fullname, set()).add(right_type.fullname) self._checked_against_members.setdefault(left_type.fullname, set()).update( right_type.protocol_members ) def _snapshot_protocol_deps(self) -> dict[str, set[str]]: """Collect protocol attribute dependencies found so far from registered subtype checks. There are three kinds of protocol dependencies. For example, after a subtype check: x: Proto = C() the following dependencies will be generated: 1. ..., <SuperProto[wildcard]>, <Proto[wildcard]> -> <Proto> 2. ..., <B.attr>, <C.attr> -> <C> [for every attr in Proto members] 3. <C> -> Proto # this one to invalidate the subtype cache The first kind is generated immediately per-module in deps.py (see also an example there for motivation why it is needed). While two other kinds are generated here after all modules are type checked and we have recorded all the subtype checks. To understand these two kinds, consider a simple example: class A: def __iter__(self) -> Iterator[int]: ... it: Iterable[int] = A() We add <a.A.__iter__> -> <a.A> to invalidate the assignment (module target in this case), whenever the signature of a.A.__iter__ changes. We also add <a.A> -> typing.Iterable, to invalidate the subtype caches of the latter. (Note that the same logic applies to proper subtype checks, and calculating meets and joins, if this involves calling 'subtypes.is_protocol_implementation'). """ deps: dict[str, set[str]] = {} for info in self._rechecked_types: for attr in self._checked_against_members[info.fullname]: # The need for full MRO here is subtle, during an update, base classes of # a concrete class may not be reprocessed, so not all <B.x> -> <C.x> deps # are added. for base_info in info.mro[:-1]: trigger = make_trigger(f"{base_info.fullname}.{attr}") if "typing" in trigger or "builtins" in trigger: # TODO: avoid everything from typeshed continue deps.setdefault(trigger, set()).add(make_trigger(info.fullname)) for proto in self._attempted_protocols[info.fullname]: trigger = make_trigger(info.fullname) if "typing" in trigger or "builtins" in trigger: continue # If any class that was checked against a protocol changes, # we need to reset the subtype cache for the protocol. # # Note: strictly speaking, the protocol doesn't need to be # re-checked, we only need to reset the cache, and its uses # elsewhere are still valid (unless invalidated by other deps). deps.setdefault(trigger, set()).add(proto) return deps def update_protocol_deps(self, second_map: dict[str, set[str]] | None = None) -> None: """Update global protocol dependency map. We update the global map incrementally, using a snapshot only from recently type checked types. If second_map is given, update it as well. 
        This is currently used by the FineGrainedBuildManager, which maintains normal
        (non-protocol) dependencies.
        """
        assert self.proto_deps is not None, "This should not be called after failed cache load"
        new_deps = self._snapshot_protocol_deps()
        for trigger, targets in new_deps.items():
            self.proto_deps.setdefault(trigger, set()).update(targets)
        if second_map is not None:
            for trigger, targets in new_deps.items():
                second_map.setdefault(trigger, set()).update(targets)
        self._rechecked_types.clear()
        self._attempted_protocols.clear()
        self._checked_against_members.clear()

    def add_all_protocol_deps(self, deps: dict[str, set[str]]) -> None:
        """Add all known protocol dependencies to deps.

        This is used by tests and debug output, and also when gathering all
        collected or loaded dependencies as part of a build.
        """
        self.update_protocol_deps()  # just in case
        if self.proto_deps is not None:
            for trigger, targets in self.proto_deps.items():
                deps.setdefault(trigger, set()).update(targets)


type_state: Final = TypeState()


def reset_global_state() -> None:
    """Reset most existing global state.

    Currently most of it is in this module. A few exceptions are the strict
    optional status and functools.lru_cache.
    """
    type_state.reset_all_subtype_caches()
    type_state.reset_protocol_deps()
    TypeVarId.next_raw_id = 1
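
if __name__ == "__main__":
    # A minimal usage sketch of the module-level singleton (illustrative only;
    # flags such as infer_polymorphic are normally toggled by the build driver,
    # not by individual subtype checks).
    print(type_state.get_assumptions(is_proper=False))  # [] -- no recursive-alias assumptions
    type_state.infer_polymorphic = True  # enable polymorphic inference mode
    reset_global_state()  # drop subtype caches and protocol deps after a build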
algorandfoundation/puya
src/puyapy/_vendor/mypy/typestate.py
Python
NOASSERTION
16,005
from __future__ import annotations from typing import Iterable from mypy_extensions import trait from mypy.types import ( AnyType, CallableArgument, CallableType, DeletedType, EllipsisType, ErasedType, Instance, LiteralType, NoneType, Overloaded, Parameters, ParamSpecType, PartialType, PlaceholderType, RawExpressionType, SyntheticTypeVisitor, TupleType, Type, TypeAliasType, TypedDictType, TypeList, TypeType, TypeVarTupleType, TypeVarType, UnboundType, UninhabitedType, UnionType, UnpackType, ) @trait class TypeTraverserVisitor(SyntheticTypeVisitor[None]): """Visitor that traverses all components of a type""" # Atomic types def visit_any(self, t: AnyType) -> None: pass def visit_uninhabited_type(self, t: UninhabitedType) -> None: pass def visit_none_type(self, t: NoneType) -> None: pass def visit_erased_type(self, t: ErasedType) -> None: pass def visit_deleted_type(self, t: DeletedType) -> None: pass def visit_type_var(self, t: TypeVarType) -> None: # Note that type variable values and upper bound aren't treated as # components, since they are components of the type variable # definition. We want to traverse everything just once. t.default.accept(self) def visit_param_spec(self, t: ParamSpecType) -> None: t.default.accept(self) def visit_parameters(self, t: Parameters) -> None: self.traverse_types(t.arg_types) def visit_type_var_tuple(self, t: TypeVarTupleType) -> None: t.default.accept(self) def visit_literal_type(self, t: LiteralType) -> None: t.fallback.accept(self) # Composite types def visit_instance(self, t: Instance) -> None: self.traverse_types(t.args) def visit_callable_type(self, t: CallableType) -> None: # FIX generics self.traverse_types(t.arg_types) t.ret_type.accept(self) t.fallback.accept(self) if t.type_guard is not None: t.type_guard.accept(self) if t.type_is is not None: t.type_is.accept(self) def visit_tuple_type(self, t: TupleType) -> None: self.traverse_types(t.items) t.partial_fallback.accept(self) def visit_typeddict_type(self, t: TypedDictType) -> None: self.traverse_types(t.items.values()) t.fallback.accept(self) def visit_union_type(self, t: UnionType) -> None: self.traverse_types(t.items) def visit_overloaded(self, t: Overloaded) -> None: self.traverse_types(t.items) def visit_type_type(self, t: TypeType) -> None: t.item.accept(self) # Special types (not real types) def visit_callable_argument(self, t: CallableArgument) -> None: t.typ.accept(self) def visit_unbound_type(self, t: UnboundType) -> None: self.traverse_types(t.args) def visit_type_list(self, t: TypeList) -> None: self.traverse_types(t.items) def visit_ellipsis_type(self, t: EllipsisType) -> None: pass def visit_placeholder_type(self, t: PlaceholderType) -> None: self.traverse_types(t.args) def visit_partial_type(self, t: PartialType) -> None: pass def visit_raw_expression_type(self, t: RawExpressionType) -> None: pass def visit_type_alias_type(self, t: TypeAliasType) -> None: # TODO: sometimes we want to traverse target as well # We need to find a way to indicate explicitly the intent, # maybe make this method abstract (like for TypeTranslator)? self.traverse_types(t.args) def visit_unpack_type(self, t: UnpackType) -> None: t.type.accept(self) # Helpers def traverse_types(self, types: Iterable[Type]) -> None: for typ in types: typ.accept(self)
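
if __name__ == "__main__":
    # A minimal subclass sketch (assumptions: a pure-Python, non-compiled copy
    # of mypy so the trait can be subclassed here; `NoneCollector` is
    # hypothetical, not a mypy class). Override one visit_* hook and let the
    # trait's traversal visit every nested component.
    from mypy.types import TypeOfAny

    class NoneCollector(TypeTraverserVisitor):
        def __init__(self) -> None:
            self.count = 0

        def visit_none_type(self, t: NoneType) -> None:
            self.count += 1

    collector = NoneCollector()
    UnionType([NoneType(), AnyType(TypeOfAny.special_form)]).accept(collector)
    print(collector.count)  # -> 1: traversal reached the nested NoneType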
algorandfoundation/puya
src/puyapy/_vendor/mypy/typetraverser.py
Python
NOASSERTION
3,924
from __future__ import annotations from mypy.erasetype import erase_typevars from mypy.nodes import TypeInfo from mypy.types import ( Instance, ParamSpecType, ProperType, TupleType, Type, TypeOfAny, TypeVarLikeType, TypeVarTupleType, TypeVarType, UnpackType, ) from mypy.typevartuples import erased_vars def fill_typevars(typ: TypeInfo) -> Instance | TupleType: """For a non-generic type, return instance type representing the type. For a generic G type with parameters T1, .., Tn, return G[T1, ..., Tn]. """ tvs: list[Type] = [] # TODO: why do we need to keep both typ.type_vars and typ.defn.type_vars? for i in range(len(typ.defn.type_vars)): tv: TypeVarLikeType | UnpackType = typ.defn.type_vars[i] # Change the line number if isinstance(tv, TypeVarType): tv = tv.copy_modified(line=-1, column=-1) elif isinstance(tv, TypeVarTupleType): tv = UnpackType( TypeVarTupleType( tv.name, tv.fullname, tv.id, tv.upper_bound, tv.tuple_fallback, tv.default, line=-1, column=-1, ) ) else: assert isinstance(tv, ParamSpecType) tv = ParamSpecType( tv.name, tv.fullname, tv.id, tv.flavor, tv.upper_bound, tv.default, line=-1, column=-1, ) tvs.append(tv) inst = Instance(typ, tvs) # TODO: do we need to also handle typeddict_type here and below? if typ.tuple_type is None: return inst return typ.tuple_type.copy_modified(fallback=inst) def fill_typevars_with_any(typ: TypeInfo) -> Instance | TupleType: """Apply a correct number of Any's as type arguments to a type.""" inst = Instance(typ, erased_vars(typ.defn.type_vars, TypeOfAny.special_form)) if typ.tuple_type is None: return inst erased_tuple_type = erase_typevars(typ.tuple_type, {tv.id for tv in typ.defn.type_vars}) assert isinstance(erased_tuple_type, ProperType) if isinstance(erased_tuple_type, TupleType): return typ.tuple_type.copy_modified(fallback=inst) return inst def has_no_typevars(typ: Type) -> bool: # We test if a type contains type variables by erasing all type variables # and comparing the result to the original type. We use comparison by equality that # in turn uses `__eq__` defined for types. Note: we can't use `is_same_type` because # it is not safe with unresolved forward references, while this function may be called # before forward references resolution patch pass. Note also that it is not safe to use # `is` comparison because `erase_typevars` doesn't preserve type identity. return typ == erase_typevars(typ)
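
if __name__ == "__main__":
    # A minimal check illustrating has_no_typevars() (assumptions: this mypy
    # package is importable; illustrative only). Erasing a type that contains
    # no type variables yields an equal type, so the function returns True.
    from mypy.types import AnyType, NoneType, TypeOfAny, UnionType

    print(has_no_typevars(UnionType([NoneType(), AnyType(TypeOfAny.special_form)])))  # True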
algorandfoundation/puya
src/puyapy/_vendor/mypy/typevars.py
Python
NOASSERTION
2,996
"""Helpers for interacting with type var tuples.""" from __future__ import annotations from typing import Sequence from mypy.types import ( AnyType, Instance, ProperType, Type, TypeVarLikeType, TypeVarTupleType, UnpackType, get_proper_type, split_with_prefix_and_suffix, ) def split_with_instance( typ: Instance, ) -> tuple[tuple[Type, ...], tuple[Type, ...], tuple[Type, ...]]: assert typ.type.type_var_tuple_prefix is not None assert typ.type.type_var_tuple_suffix is not None return split_with_prefix_and_suffix( typ.args, typ.type.type_var_tuple_prefix, typ.type.type_var_tuple_suffix ) def extract_unpack(types: Sequence[Type]) -> ProperType | None: """Given a list of types, extracts either a single type from an unpack, or returns None.""" if len(types) == 1: if isinstance(types[0], UnpackType): return get_proper_type(types[0].type) return None def erased_vars(type_vars: Sequence[TypeVarLikeType], type_of_any: int) -> list[Type]: args: list[Type] = [] for tv in type_vars: # Valid erasure for *Ts is *tuple[Any, ...], not just Any. if isinstance(tv, TypeVarTupleType): args.append(UnpackType(tv.tuple_fallback.copy_modified(args=[AnyType(type_of_any)]))) else: args.append(AnyType(type_of_any)) return args
algorandfoundation/puya
src/puyapy/_vendor/mypy/typevartuples.py
Python
NOASSERTION
1,383
"""Utility functions with no non-trivial dependencies.""" from __future__ import annotations import hashlib import io import json import os import re import shutil import sys import time from importlib import resources as importlib_resources from typing import IO, Any, Callable, Container, Final, Iterable, Sequence, Sized, TypeVar from typing_extensions import Literal orjson: Any try: import orjson # type: ignore[import-not-found, no-redef, unused-ignore] except ImportError: orjson = None try: import curses import _curses # noqa: F401 CURSES_ENABLED = True except ImportError: CURSES_ENABLED = False T = TypeVar("T") if sys.version_info >= (3, 9): TYPESHED_DIR: Final = str(importlib_resources.files("mypy") / "typeshed") else: with importlib_resources.path( "mypy", # mypy-c doesn't support __package__ "py.typed", # a marker file for type information, we assume typeshed to live in the same dir ) as _resource: TYPESHED_DIR = str(_resource.parent / "typeshed") ENCODING_RE: Final = re.compile(rb"([ \t\v]*#.*(\r\n?|\n))??[ \t\v]*#.*coding[:=][ \t]*([-\w.]+)") DEFAULT_SOURCE_OFFSET: Final = 4 DEFAULT_COLUMNS: Final = 80 # At least this number of columns will be shown on each side of # error location when printing source code snippet. MINIMUM_WIDTH: Final = 20 # VT100 color code processing was added in Windows 10, but only the second major update, # Threshold 2. Fortunately, everyone (even on LTSB, Long Term Support Branch) should # have a version of Windows 10 newer than this. Note that Windows 8 and below are not # supported, but are either going out of support, or make up only a few % of the market. MINIMUM_WINDOWS_MAJOR_VT100: Final = 10 MINIMUM_WINDOWS_BUILD_VT100: Final = 10586 SPECIAL_DUNDERS: Final = frozenset( ("__init__", "__new__", "__call__", "__init_subclass__", "__class_getitem__") ) def is_dunder(name: str, exclude_special: bool = False) -> bool: """Returns whether name is a dunder name. Args: exclude_special: Whether to return False for a couple special dunder methods. """ if exclude_special and name in SPECIAL_DUNDERS: return False return name.startswith("__") and name.endswith("__") def is_sunder(name: str) -> bool: return not is_dunder(name) and name.startswith("_") and name.endswith("_") def split_module_names(mod_name: str) -> list[str]: """Return the module and all parent module names. So, if `mod_name` is 'a.b.c', this function will return ['a.b.c', 'a.b', and 'a']. """ out = [mod_name] while "." in mod_name: mod_name = mod_name.rsplit(".", 1)[0] out.append(mod_name) return out def module_prefix(modules: Iterable[str], target: str) -> str | None: result = split_target(modules, target) if result is None: return None return result[0] def split_target(modules: Iterable[str], target: str) -> tuple[str, str] | None: remaining: list[str] = [] while True: if target in modules: return target, ".".join(remaining) components = target.rsplit(".", 1) if len(components) == 1: return None target = components[0] remaining.insert(0, components[1]) def short_type(obj: object) -> str: """Return the last component of the type name of an object. If obj is None, return 'nil'. For example, if obj is 1, return 'int'. 
""" if obj is None: return "nil" t = str(type(obj)) return t.split(".")[-1].rstrip("'>") def find_python_encoding(text: bytes) -> tuple[str, int]: """PEP-263 for detecting Python file encoding""" result = ENCODING_RE.match(text) if result: line = 2 if result.group(1) else 1 encoding = result.group(3).decode("ascii") # Handle some aliases that Python is happy to accept and that are used in the wild. if encoding.startswith(("iso-latin-1-", "latin-1-")) or encoding == "iso-latin-1": encoding = "latin-1" return encoding, line else: default_encoding = "utf8" return default_encoding, -1 def bytes_to_human_readable_repr(b: bytes) -> str: """Converts bytes into some human-readable representation. Unprintable bytes such as the nul byte are escaped. For example: >>> b = bytes([102, 111, 111, 10, 0]) >>> s = bytes_to_human_readable_repr(b) >>> print(s) foo\n\x00 >>> print(repr(s)) 'foo\\n\\x00' """ return repr(b)[2:-1] class DecodeError(Exception): """Exception raised when a file cannot be decoded due to an unknown encoding type. Essentially a wrapper for the LookupError raised by `bytearray.decode` """ def decode_python_encoding(source: bytes) -> str: """Read the Python file with while obeying PEP-263 encoding detection. Returns the source as a string. """ # check for BOM UTF-8 encoding and strip it out if present if source.startswith(b"\xef\xbb\xbf"): encoding = "utf8" source = source[3:] else: # look at first two lines and check if PEP-263 coding is present encoding, _ = find_python_encoding(source) try: source_text = source.decode(encoding) except LookupError as lookuperr: raise DecodeError(str(lookuperr)) from lookuperr return source_text def read_py_file(path: str, read: Callable[[str], bytes]) -> list[str] | None: """Try reading a Python file as list of source lines. Return None if something goes wrong. """ try: source = read(path) except OSError: return None else: try: source_lines = decode_python_encoding(source).splitlines() except DecodeError: return None return source_lines def trim_source_line(line: str, max_len: int, col: int, min_width: int) -> tuple[str, int]: """Trim a line of source code to fit into max_len. Show 'min_width' characters on each side of 'col' (an error location). If either start or end is trimmed, this is indicated by adding '...' there. A typical result looks like this: ...some_variable = function_to_call(one_arg, other_arg) or... Return the trimmed string and the column offset to to adjust error location. """ if max_len < 2 * min_width + 1: # In case the window is too tiny it is better to still show something. max_len = 2 * min_width + 1 # Trivial case: line already fits in. if len(line) <= max_len: return line, 0 # If column is not too large so that there is still min_width after it, # the line doesn't need to be trimmed at the start. if col + min_width < max_len: return line[:max_len] + "...", 0 # Otherwise, if the column is not too close to the end, trim both sides. if col < len(line) - min_width - 1: offset = col - max_len + min_width + 1 return "..." + line[offset : col + min_width + 1] + "...", offset - 3 # Finally, if the column is near the end, just trim the start. return "..." 
+ line[-max_len:], len(line) - max_len - 3 def get_mypy_comments(source: str) -> list[tuple[int, str]]: PREFIX = "# mypy: " # Don't bother splitting up the lines unless we know it is useful if PREFIX not in source: return [] lines = source.split("\n") results = [] for i, line in enumerate(lines): if line.startswith(PREFIX): results.append((i + 1, line[len(PREFIX) :])) return results JUNIT_HEADER_TEMPLATE: Final = """<?xml version="1.0" encoding="utf-8"?> <testsuite errors="{errors}" failures="{failures}" name="mypy" skips="0" tests="{tests}" time="{time:.3f}"> """ JUNIT_TESTCASE_FAIL_TEMPLATE: Final = """ <testcase classname="mypy" file="{filename}" line="1" name="{name}" time="{time:.3f}"> <failure message="mypy produced messages">{text}</failure> </testcase> """ JUNIT_ERROR_TEMPLATE: Final = """ <testcase classname="mypy" file="mypy" line="1" name="mypy-py{ver}-{platform}" time="{time:.3f}"> <error message="mypy produced errors">{text}</error> </testcase> """ JUNIT_TESTCASE_PASS_TEMPLATE: Final = """ <testcase classname="mypy" file="mypy" line="1" name="mypy-py{ver}-{platform}" time="{time:.3f}"> </testcase> """ JUNIT_FOOTER: Final = """</testsuite> """ def _generate_junit_contents( dt: float, serious: bool, messages_by_file: dict[str | None, list[str]], version: str, platform: str, ) -> str: from xml.sax.saxutils import escape if serious: failures = 0 errors = len(messages_by_file) else: failures = len(messages_by_file) errors = 0 xml = JUNIT_HEADER_TEMPLATE.format( errors=errors, failures=failures, time=dt, # If there are no messages, we still write one "test" indicating success. tests=len(messages_by_file) or 1, ) if not messages_by_file: xml += JUNIT_TESTCASE_PASS_TEMPLATE.format(time=dt, ver=version, platform=platform) else: for filename, messages in messages_by_file.items(): if filename is not None: xml += JUNIT_TESTCASE_FAIL_TEMPLATE.format( text=escape("\n".join(messages)), filename=filename, time=dt, name="mypy-py{ver}-{platform} {filename}".format( ver=version, platform=platform, filename=filename ), ) else: xml += JUNIT_TESTCASE_FAIL_TEMPLATE.format( text=escape("\n".join(messages)), filename="mypy", time=dt, name=f"mypy-py{version}-{platform}", ) xml += JUNIT_FOOTER return xml def write_junit_xml( dt: float, serious: bool, messages_by_file: dict[str | None, list[str]], path: str, version: str, platform: str, ) -> None: xml = _generate_junit_contents(dt, serious, messages_by_file, version, platform) # creates folders if needed xml_dirs = os.path.dirname(os.path.abspath(path)) os.makedirs(xml_dirs, exist_ok=True) with open(path, "wb") as f: f.write(xml.encode("utf-8")) class IdMapper: """Generate integer ids for objects. Unlike id(), these start from 0 and increment by 1, and ids won't get reused across the life-time of IdMapper. Assume objects don't redefine __eq__ or __hash__. """ def __init__(self) -> None: self.id_map: dict[object, int] = {} self.next_id = 0 def id(self, o: object) -> int: if o not in self.id_map: self.id_map[o] = self.next_id self.next_id += 1 return self.id_map[o] def get_prefix(fullname: str) -> str: """Drop the final component of a qualified name (e.g. ('x.y' -> 'x').""" return fullname.rsplit(".", 1)[0] def correct_relative_import( cur_mod_id: str, relative: int, target: str, is_cur_package_init_file: bool ) -> tuple[str, bool]: if relative == 0: return target, True parts = cur_mod_id.split(".") rel = relative if is_cur_package_init_file: rel -= 1 ok = len(parts) >= rel if rel != 0: cur_mod_id = ".".join(parts[:-rel]) return cur_mod_id + (("." 
+ target) if target else ""), ok fields_cache: Final[dict[type[object], list[str]]] = {} def get_class_descriptors(cls: type[object]) -> Sequence[str]: import inspect # Lazy import for minor startup speed win # Maintain a cache of type -> attributes defined by descriptors in the class # (that is, attributes from __slots__ and C extension classes) if cls not in fields_cache: members = inspect.getmembers( cls, lambda o: inspect.isgetsetdescriptor(o) or inspect.ismemberdescriptor(o) ) fields_cache[cls] = [x for x, y in members if x != "__weakref__" and x != "__dict__"] return fields_cache[cls] def replace_object_state( new: object, old: object, copy_dict: bool = False, skip_slots: tuple[str, ...] = () ) -> None: """Copy state of old node to the new node. This handles cases where there is __dict__ and/or attribute descriptors (either from slots or because the type is defined in a C extension module). Assume that both objects have the same __class__. """ if hasattr(old, "__dict__"): if copy_dict: new.__dict__ = dict(old.__dict__) else: new.__dict__ = old.__dict__ for attr in get_class_descriptors(old.__class__): if attr in skip_slots: continue try: if hasattr(old, attr): setattr(new, attr, getattr(old, attr)) elif hasattr(new, attr): delattr(new, attr) # There is no way to distinguish getsetdescriptors that allow # writes from ones that don't (I think?), so we just ignore # AttributeErrors if we need to. # TODO: What about getsetdescriptors that act like properties??? except AttributeError: pass def is_sub_path_normabs(path: str, dir: str) -> bool: """Given two paths, return if path is a sub-path of dir. Moral equivalent of: Path(dir) in Path(path).parents Similar to the pathlib version: - Treats paths case-sensitively - Does not fully handle unnormalised paths (e.g. paths with "..") - Does not handle a mix of absolute and relative paths Unlike the pathlib version: - Fast - On Windows, assumes input has been slash normalised - Handles even fewer unnormalised paths (e.g. paths with "." and "//") As a result, callers should ensure that inputs have had os.path.abspath called on them (note that os.path.abspath will normalise) """ if not dir.endswith(os.sep): dir += os.sep return path.startswith(dir) if sys.platform == "linux" or sys.platform == "darwin": def os_path_join(path: str, b: str) -> str: # Based off of os.path.join, but simplified to str-only, 2 args and mypyc can compile it. if b.startswith("/") or not path: return b elif path.endswith("/"): return path + b else: return path + "/" + b else: def os_path_join(a: str, p: str) -> str: return os.path.join(a, p) def hard_exit(status: int = 0) -> None: """Kill the current process without fully cleaning up. This can be quite a bit faster than a normal exit() since objects are not freed. """ sys.stdout.flush() sys.stderr.flush() os._exit(status) def unmangle(name: str) -> str: """Remove internal suffixes from a short name.""" return name.rstrip("'") def get_unique_redefinition_name(name: str, existing: Container[str]) -> str: """Get a simple redefinition name not present among existing. For example, for name 'foo' we try 'foo-redefinition', 'foo-redefinition2', 'foo-redefinition3', etc. until we find one that is not in existing. """ r_name = name + "-redefinition" if r_name not in existing: return r_name i = 2 while r_name + str(i) in existing: i += 1 return r_name + str(i) def check_python_version(program: str) -> None: """Report issues with the Python used to run mypy, dmypy, or stubgen""" # Check for known bad Python versions. 
if sys.version_info[:2] < (3, 8): # noqa: UP036 sys.exit( "Running {name} with Python 3.7 or lower is not supported; " "please upgrade to 3.8 or newer".format(name=program) ) def count_stats(messages: list[str]) -> tuple[int, int, int]: """Count total number of errors, notes and error_files in message list.""" errors = [e for e in messages if ": error:" in e] error_files = {e.split(":")[0] for e in errors} notes = [e for e in messages if ": note:" in e] return len(errors), len(notes), len(error_files) def split_words(msg: str) -> list[str]: """Split line of text into words (but not within quoted groups).""" next_word = "" res: list[str] = [] allow_break = True for c in msg: if c == " " and allow_break: res.append(next_word) next_word = "" continue if c == '"': allow_break = not allow_break next_word += c res.append(next_word) return res def get_terminal_width() -> int: """Get current terminal width if possible, otherwise return the default one.""" return ( int(os.getenv("MYPY_FORCE_TERMINAL_WIDTH", "0")) or shutil.get_terminal_size().columns or DEFAULT_COLUMNS ) def soft_wrap(msg: str, max_len: int, first_offset: int, num_indent: int = 0) -> str: """Wrap a long error message into few lines. Breaks will only happen between words, and never inside a quoted group (to avoid breaking types such as "Union[int, str]"). The 'first_offset' is the width before the start of first line. Pad every next line with 'num_indent' spaces. Every line will be at most 'max_len' characters, except if it is a single word or quoted group. For example: first_offset ------------------------ path/to/file: error: 58: Some very long error message that needs to be split in separate lines. "Long[Type, Names]" are never split. ^^^^-------------------------------------------------- num_indent max_len """ words = split_words(msg) next_line = words.pop(0) lines: list[str] = [] while words: next_word = words.pop(0) max_line_len = max_len - num_indent if lines else max_len - first_offset # Add 1 to account for space between words. if len(next_line) + len(next_word) + 1 <= max_line_len: next_line += " " + next_word else: lines.append(next_line) next_line = next_word lines.append(next_line) padding = "\n" + " " * num_indent return padding.join(lines) def hash_digest(data: bytes) -> str: """Compute a hash digest of some data. We use a cryptographic hash because we want a low probability of accidental collision, but we don't really care about any of the cryptographic properties. """ return hashlib.sha1(data).hexdigest() def parse_gray_color(cup: bytes) -> str: """Reproduce a gray color in ANSI escape sequence""" if sys.platform == "win32": assert False, "curses is not available on Windows" set_color = "".join([cup[:-1].decode(), "m"]) gray = curses.tparm(set_color.encode("utf-8"), 1, 9).decode() return gray def should_force_color() -> bool: env_var = os.getenv("MYPY_FORCE_COLOR", os.getenv("FORCE_COLOR", "0")) try: return bool(int(env_var)) except ValueError: return bool(env_var) class FancyFormatter: """Apply color and bold font to terminal output. This currently only works on Linux and Mac. """ def __init__( self, f_out: IO[str], f_err: IO[str], hide_error_codes: bool, hide_success: bool = False ) -> None: self.hide_error_codes = hide_error_codes self.hide_success = hide_success # Check if we are in a human-facing terminal on a supported platform. 
if sys.platform not in ("linux", "darwin", "win32", "emscripten"): self.dummy_term = True return if not should_force_color() and (not f_out.isatty() or not f_err.isatty()): self.dummy_term = True return if sys.platform == "win32": self.dummy_term = not self.initialize_win_colors() elif sys.platform == "emscripten": self.dummy_term = not self.initialize_vt100_colors() else: self.dummy_term = not self.initialize_unix_colors() if not self.dummy_term: self.colors = { "red": self.RED, "green": self.GREEN, "blue": self.BLUE, "yellow": self.YELLOW, "none": "", } def initialize_vt100_colors(self) -> bool: """Return True if initialization was successful and we can use colors, False otherwise""" # Windows and Emscripten can both use ANSI/VT100 escape sequences for color assert sys.platform in ("win32", "emscripten") self.BOLD = "\033[1m" self.UNDER = "\033[4m" self.BLUE = "\033[94m" self.GREEN = "\033[92m" self.RED = "\033[91m" self.YELLOW = "\033[93m" self.NORMAL = "\033[0m" self.DIM = "\033[2m" return True def initialize_win_colors(self) -> bool: """Return True if initialization was successful and we can use colors, False otherwise""" # Windows ANSI escape sequences are only supported on Threshold 2 and above. # we check with an assert at runtime and an if check for mypy, as asserts do not # yet narrow platform assert sys.platform == "win32" if sys.platform == "win32": winver = sys.getwindowsversion() if ( winver.major < MINIMUM_WINDOWS_MAJOR_VT100 or winver.build < MINIMUM_WINDOWS_BUILD_VT100 ): return False import ctypes kernel32 = ctypes.windll.kernel32 ENABLE_PROCESSED_OUTPUT = 0x1 ENABLE_WRAP_AT_EOL_OUTPUT = 0x2 ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x4 STD_OUTPUT_HANDLE = -11 kernel32.SetConsoleMode( kernel32.GetStdHandle(STD_OUTPUT_HANDLE), ENABLE_PROCESSED_OUTPUT | ENABLE_WRAP_AT_EOL_OUTPUT | ENABLE_VIRTUAL_TERMINAL_PROCESSING, ) self.initialize_vt100_colors() return True return False def initialize_unix_colors(self) -> bool: """Return True if initialization was successful and we can use colors, False otherwise""" if sys.platform == "win32" or not CURSES_ENABLED: return False try: # setupterm wants a fd to potentially write an "initialization sequence". # We override sys.stdout for the daemon API so if stdout doesn't have an fd, # just give it /dev/null. try: fd = sys.stdout.fileno() except io.UnsupportedOperation: with open("/dev/null", "rb") as f: curses.setupterm(fd=f.fileno()) else: curses.setupterm(fd=fd) except curses.error: # Most likely terminfo not found. 
return False bold = curses.tigetstr("bold") under = curses.tigetstr("smul") set_color = curses.tigetstr("setaf") set_eseq = curses.tigetstr("cup") normal = curses.tigetstr("sgr0") if not (bold and under and set_color and set_eseq and normal): return False self.NORMAL = normal.decode() self.BOLD = bold.decode() self.UNDER = under.decode() self.DIM = parse_gray_color(set_eseq) self.BLUE = curses.tparm(set_color, curses.COLOR_BLUE).decode() self.GREEN = curses.tparm(set_color, curses.COLOR_GREEN).decode() self.RED = curses.tparm(set_color, curses.COLOR_RED).decode() self.YELLOW = curses.tparm(set_color, curses.COLOR_YELLOW).decode() return True def style( self, text: str, color: Literal["red", "green", "blue", "yellow", "none"], bold: bool = False, underline: bool = False, dim: bool = False, ) -> str: """Apply simple color and style (underlined or bold).""" if self.dummy_term: return text if bold: start = self.BOLD else: start = "" if underline: start += self.UNDER if dim: start += self.DIM return start + self.colors[color] + text + self.NORMAL def fit_in_terminal( self, messages: list[str], fixed_terminal_width: int | None = None ) -> list[str]: """Improve readability by wrapping error messages and trimming source code.""" width = fixed_terminal_width or get_terminal_width() new_messages = messages.copy() for i, error in enumerate(messages): if ": error:" in error: loc, msg = error.split("error:", maxsplit=1) msg = soft_wrap(msg, width, first_offset=len(loc) + len("error: ")) new_messages[i] = loc + "error:" + msg if error.startswith(" " * DEFAULT_SOURCE_OFFSET) and "^" not in error: # TODO: detecting source code highlights through an indent can be surprising. # Restore original error message and error location. error = error[DEFAULT_SOURCE_OFFSET:] marker_line = messages[i + 1] marker_column = marker_line.index("^") column = marker_column - DEFAULT_SOURCE_OFFSET if "~" not in marker_line: marker = "^" else: # +1 because both ends are included marker = marker_line[marker_column : marker_line.rindex("~") + 1] # Let source have some space also on the right side, plus 6 # to accommodate ... on each side. max_len = width - DEFAULT_SOURCE_OFFSET - 6 source_line, offset = trim_source_line(error, max_len, column, MINIMUM_WIDTH) new_messages[i] = " " * DEFAULT_SOURCE_OFFSET + source_line # Also adjust the error marker position and trim error marker is needed. new_marker_line = " " * (DEFAULT_SOURCE_OFFSET + column - offset) + marker if len(new_marker_line) > len(new_messages[i]) and len(marker) > 3: new_marker_line = new_marker_line[: len(new_messages[i]) - 3] + "..." new_messages[i + 1] = new_marker_line return new_messages def colorize(self, error: str) -> str: """Colorize an output line by highlighting the status and error code.""" if ": error:" in error: loc, msg = error.split("error:", maxsplit=1) if self.hide_error_codes: return ( loc + self.style("error:", "red", bold=True) + self.highlight_quote_groups(msg) ) codepos = msg.rfind("[") if codepos != -1: code = msg[codepos:] msg = msg[:codepos] else: code = "" # no error code specified return ( loc + self.style("error:", "red", bold=True) + self.highlight_quote_groups(msg) + self.style(code, "yellow") ) elif ": note:" in error: loc, msg = error.split("note:", maxsplit=1) formatted = self.highlight_quote_groups(self.underline_link(msg)) return loc + self.style("note:", "blue") + formatted elif error.startswith(" " * DEFAULT_SOURCE_OFFSET): # TODO: detecting source code highlights through an indent can be surprising. 
if "^" not in error: return self.style(error, "none", dim=True) return self.style(error, "red") else: return error def highlight_quote_groups(self, msg: str) -> str: """Make groups quoted with double quotes bold (including quotes). This is used to highlight types, attribute names etc. """ if msg.count('"') % 2: # Broken error message, don't do any formatting. return msg parts = msg.split('"') out = "" for i, part in enumerate(parts): if i % 2 == 0: out += self.style(part, "none") else: out += self.style('"' + part + '"', "none", bold=True) return out def underline_link(self, note: str) -> str: """Underline a link in a note message (if any). This assumes there is at most one link in the message. """ match = re.search(r"https?://\S*", note) if not match: return note start = match.start() end = match.end() return note[:start] + self.style(note[start:end], "none", underline=True) + note[end:] def format_success(self, n_sources: int, use_color: bool = True) -> str: """Format short summary in case of success. n_sources is total number of files passed directly on command line, i.e. excluding stubs and followed imports. """ if self.hide_success: return "" msg = f"Success: no issues found in {n_sources} source file{plural_s(n_sources)}" if not use_color: return msg return self.style(msg, "green", bold=True) def format_error( self, n_errors: int, n_files: int, n_sources: int, *, blockers: bool = False, use_color: bool = True, ) -> str: """Format a short summary in case of errors.""" msg = f"Found {n_errors} error{plural_s(n_errors)} in {n_files} file{plural_s(n_files)}" if blockers: msg += " (errors prevented further checking)" else: msg += f" (checked {n_sources} source file{plural_s(n_sources)})" if not use_color: return msg return self.style(msg, "red", bold=True) def is_typeshed_file(typeshed_dir: str | None, file: str) -> bool: typeshed_dir = typeshed_dir if typeshed_dir is not None else TYPESHED_DIR try: return os.path.commonpath((typeshed_dir, os.path.abspath(file))) == typeshed_dir except ValueError: # Different drives on Windows return False def is_stub_package_file(file: str) -> bool: # Use hacky heuristics to check whether file is part of a PEP 561 stub package. if not file.endswith(".pyi"): return False return any(component.endswith("-stubs") for component in os.path.split(os.path.abspath(file))) def unnamed_function(name: str | None) -> bool: return name is not None and name == "_" time_ref = time.perf_counter_ns def time_spent_us(t0: int) -> int: return int((time.perf_counter_ns() - t0) / 1000) def plural_s(s: int | Sized) -> str: count = s if isinstance(s, int) else len(s) if count != 1: return "s" else: return "" def quote_docstring(docstr: str) -> str: """Returns docstring correctly encapsulated in a single or double quoted form.""" # Uses repr to get hint on the correct quotes and escape everything properly. # Creating multiline string for prettier output. docstr_repr = "\n".join(re.split(r"(?<=[^\\])\\n", repr(docstr))) if docstr_repr.startswith("'"): # Enforce double quotes when it's safe to do so. # That is when double quotes are not in the string # or when it doesn't end with a single quote. 
if '"' not in docstr_repr[1:-1] and docstr_repr[-2] != "'": return f'"""{docstr_repr[1:-1]}"""' return f"''{docstr_repr}''" else: return f'""{docstr_repr}""' def json_dumps(obj: object, debug: bool = False) -> bytes: if orjson is not None: if debug: return orjson.dumps(obj, option=orjson.OPT_INDENT_2 | orjson.OPT_SORT_KEYS) # type: ignore[no-any-return] else: # TODO: If we don't sort keys here, testIncrementalInternalScramble fails # We should document exactly what is going on there return orjson.dumps(obj, option=orjson.OPT_SORT_KEYS) # type: ignore[no-any-return] if debug: return json.dumps(obj, indent=2, sort_keys=True).encode("utf-8") else: # See above for sort_keys comment return json.dumps(obj, sort_keys=True, separators=(",", ":")).encode("utf-8") def json_loads(data: bytes) -> Any: if orjson is not None: return orjson.loads(data) return json.loads(data)
algorandfoundation/puya
src/puyapy/_vendor/mypy/util.py
Python
NOASSERTION
32,334
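A short usage sketch for two of the pure helpers above, assuming the vendored mypy package is importable from this location (otherwise adjust the import path):

from mypy.util import get_unique_redefinition_name, split_module_names

# parent modules are enumerated from most to least specific
assert split_module_names("a.b.c") == ["a.b.c", "a.b", "a"]
# redefinition names get an incrementing numeric suffix on collision
assert get_unique_redefinition_name("foo", {"foo-redefinition"}) == "foo-redefinition2"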
from __future__ import annotations

import os

from mypy import git

# Base version.
# - Release versions have the form "1.2.3".
# - Dev versions have the form "1.2.3+dev" (PLUS sign to conform to PEP 440).
# - Before 1.0 we had the form "0.NNN".
__version__ = "1.13.0"
base_version = __version__

mypy_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
if __version__.endswith("+dev") and git.is_git_repo(mypy_dir) and git.have_git():
    __version__ += "." + git.git_revision(mypy_dir).decode("utf-8")
    if git.is_dirty(mypy_dir):
        __version__ += ".dirty"
del mypy_dir
algorandfoundation/puya
src/puyapy/_vendor/mypy/version.py
Python
NOASSERTION
595
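A standalone sketch of the dev-version decoration the module above performs at import time; decorate_version and its arguments are illustrative stand-ins for the git-derived values:

def decorate_version(base: str, revision: str | None, dirty: bool) -> str:
    # release versions (no "+dev" suffix) pass through unchanged
    version = base
    if base.endswith("+dev") and revision is not None:
        version += "." + revision
        if dirty:
            version += ".dirty"
    return version

assert decorate_version("1.13.0", "abc123", dirty=True) == "1.13.0"
assert decorate_version("1.14.0+dev", "abc123", dirty=False) == "1.14.0+dev.abc123"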
"""Generic abstract syntax tree node visitor""" from __future__ import annotations from abc import abstractmethod from typing import TYPE_CHECKING, Generic, TypeVar from mypy_extensions import mypyc_attr, trait if TYPE_CHECKING: # break import cycle only needed for mypy import mypy.nodes import mypy.patterns T = TypeVar("T") @trait @mypyc_attr(allow_interpreted_subclasses=True) class ExpressionVisitor(Generic[T]): @abstractmethod def visit_int_expr(self, o: mypy.nodes.IntExpr) -> T: pass @abstractmethod def visit_str_expr(self, o: mypy.nodes.StrExpr) -> T: pass @abstractmethod def visit_bytes_expr(self, o: mypy.nodes.BytesExpr) -> T: pass @abstractmethod def visit_float_expr(self, o: mypy.nodes.FloatExpr) -> T: pass @abstractmethod def visit_complex_expr(self, o: mypy.nodes.ComplexExpr) -> T: pass @abstractmethod def visit_ellipsis(self, o: mypy.nodes.EllipsisExpr) -> T: pass @abstractmethod def visit_star_expr(self, o: mypy.nodes.StarExpr) -> T: pass @abstractmethod def visit_name_expr(self, o: mypy.nodes.NameExpr) -> T: pass @abstractmethod def visit_member_expr(self, o: mypy.nodes.MemberExpr) -> T: pass @abstractmethod def visit_yield_from_expr(self, o: mypy.nodes.YieldFromExpr) -> T: pass @abstractmethod def visit_yield_expr(self, o: mypy.nodes.YieldExpr) -> T: pass @abstractmethod def visit_call_expr(self, o: mypy.nodes.CallExpr) -> T: pass @abstractmethod def visit_op_expr(self, o: mypy.nodes.OpExpr) -> T: pass @abstractmethod def visit_comparison_expr(self, o: mypy.nodes.ComparisonExpr) -> T: pass @abstractmethod def visit_cast_expr(self, o: mypy.nodes.CastExpr) -> T: pass @abstractmethod def visit_assert_type_expr(self, o: mypy.nodes.AssertTypeExpr) -> T: pass @abstractmethod def visit_reveal_expr(self, o: mypy.nodes.RevealExpr) -> T: pass @abstractmethod def visit_super_expr(self, o: mypy.nodes.SuperExpr) -> T: pass @abstractmethod def visit_unary_expr(self, o: mypy.nodes.UnaryExpr) -> T: pass @abstractmethod def visit_assignment_expr(self, o: mypy.nodes.AssignmentExpr) -> T: pass @abstractmethod def visit_list_expr(self, o: mypy.nodes.ListExpr) -> T: pass @abstractmethod def visit_dict_expr(self, o: mypy.nodes.DictExpr) -> T: pass @abstractmethod def visit_tuple_expr(self, o: mypy.nodes.TupleExpr) -> T: pass @abstractmethod def visit_set_expr(self, o: mypy.nodes.SetExpr) -> T: pass @abstractmethod def visit_index_expr(self, o: mypy.nodes.IndexExpr) -> T: pass @abstractmethod def visit_type_application(self, o: mypy.nodes.TypeApplication) -> T: pass @abstractmethod def visit_lambda_expr(self, o: mypy.nodes.LambdaExpr) -> T: pass @abstractmethod def visit_list_comprehension(self, o: mypy.nodes.ListComprehension) -> T: pass @abstractmethod def visit_set_comprehension(self, o: mypy.nodes.SetComprehension) -> T: pass @abstractmethod def visit_dictionary_comprehension(self, o: mypy.nodes.DictionaryComprehension) -> T: pass @abstractmethod def visit_generator_expr(self, o: mypy.nodes.GeneratorExpr) -> T: pass @abstractmethod def visit_slice_expr(self, o: mypy.nodes.SliceExpr) -> T: pass @abstractmethod def visit_conditional_expr(self, o: mypy.nodes.ConditionalExpr) -> T: pass @abstractmethod def visit_type_var_expr(self, o: mypy.nodes.TypeVarExpr) -> T: pass @abstractmethod def visit_paramspec_expr(self, o: mypy.nodes.ParamSpecExpr) -> T: pass @abstractmethod def visit_type_var_tuple_expr(self, o: mypy.nodes.TypeVarTupleExpr) -> T: pass @abstractmethod def visit_type_alias_expr(self, o: mypy.nodes.TypeAliasExpr) -> T: pass @abstractmethod def visit_namedtuple_expr(self, o: 
mypy.nodes.NamedTupleExpr) -> T: pass @abstractmethod def visit_enum_call_expr(self, o: mypy.nodes.EnumCallExpr) -> T: pass @abstractmethod def visit_typeddict_expr(self, o: mypy.nodes.TypedDictExpr) -> T: pass @abstractmethod def visit_newtype_expr(self, o: mypy.nodes.NewTypeExpr) -> T: pass @abstractmethod def visit__promote_expr(self, o: mypy.nodes.PromoteExpr) -> T: pass @abstractmethod def visit_await_expr(self, o: mypy.nodes.AwaitExpr) -> T: pass @abstractmethod def visit_temp_node(self, o: mypy.nodes.TempNode) -> T: pass @trait @mypyc_attr(allow_interpreted_subclasses=True) class StatementVisitor(Generic[T]): # Definitions @abstractmethod def visit_assignment_stmt(self, o: mypy.nodes.AssignmentStmt) -> T: pass @abstractmethod def visit_for_stmt(self, o: mypy.nodes.ForStmt) -> T: pass @abstractmethod def visit_with_stmt(self, o: mypy.nodes.WithStmt) -> T: pass @abstractmethod def visit_del_stmt(self, o: mypy.nodes.DelStmt) -> T: pass @abstractmethod def visit_func_def(self, o: mypy.nodes.FuncDef) -> T: pass @abstractmethod def visit_overloaded_func_def(self, o: mypy.nodes.OverloadedFuncDef) -> T: pass @abstractmethod def visit_class_def(self, o: mypy.nodes.ClassDef) -> T: pass @abstractmethod def visit_global_decl(self, o: mypy.nodes.GlobalDecl) -> T: pass @abstractmethod def visit_nonlocal_decl(self, o: mypy.nodes.NonlocalDecl) -> T: pass @abstractmethod def visit_decorator(self, o: mypy.nodes.Decorator) -> T: pass # Module structure @abstractmethod def visit_import(self, o: mypy.nodes.Import) -> T: pass @abstractmethod def visit_import_from(self, o: mypy.nodes.ImportFrom) -> T: pass @abstractmethod def visit_import_all(self, o: mypy.nodes.ImportAll) -> T: pass # Statements @abstractmethod def visit_block(self, o: mypy.nodes.Block) -> T: pass @abstractmethod def visit_expression_stmt(self, o: mypy.nodes.ExpressionStmt) -> T: pass @abstractmethod def visit_operator_assignment_stmt(self, o: mypy.nodes.OperatorAssignmentStmt) -> T: pass @abstractmethod def visit_while_stmt(self, o: mypy.nodes.WhileStmt) -> T: pass @abstractmethod def visit_return_stmt(self, o: mypy.nodes.ReturnStmt) -> T: pass @abstractmethod def visit_assert_stmt(self, o: mypy.nodes.AssertStmt) -> T: pass @abstractmethod def visit_if_stmt(self, o: mypy.nodes.IfStmt) -> T: pass @abstractmethod def visit_break_stmt(self, o: mypy.nodes.BreakStmt) -> T: pass @abstractmethod def visit_continue_stmt(self, o: mypy.nodes.ContinueStmt) -> T: pass @abstractmethod def visit_pass_stmt(self, o: mypy.nodes.PassStmt) -> T: pass @abstractmethod def visit_raise_stmt(self, o: mypy.nodes.RaiseStmt) -> T: pass @abstractmethod def visit_try_stmt(self, o: mypy.nodes.TryStmt) -> T: pass @abstractmethod def visit_match_stmt(self, o: mypy.nodes.MatchStmt) -> T: pass @abstractmethod def visit_type_alias_stmt(self, o: mypy.nodes.TypeAliasStmt) -> T: pass @trait @mypyc_attr(allow_interpreted_subclasses=True) class PatternVisitor(Generic[T]): @abstractmethod def visit_as_pattern(self, o: mypy.patterns.AsPattern) -> T: pass @abstractmethod def visit_or_pattern(self, o: mypy.patterns.OrPattern) -> T: pass @abstractmethod def visit_value_pattern(self, o: mypy.patterns.ValuePattern) -> T: pass @abstractmethod def visit_singleton_pattern(self, o: mypy.patterns.SingletonPattern) -> T: pass @abstractmethod def visit_sequence_pattern(self, o: mypy.patterns.SequencePattern) -> T: pass @abstractmethod def visit_starred_pattern(self, o: mypy.patterns.StarredPattern) -> T: pass @abstractmethod def visit_mapping_pattern(self, o: mypy.patterns.MappingPattern) 
-> T: pass @abstractmethod def visit_class_pattern(self, o: mypy.patterns.ClassPattern) -> T: pass @trait @mypyc_attr(allow_interpreted_subclasses=True) class NodeVisitor(Generic[T], ExpressionVisitor[T], StatementVisitor[T], PatternVisitor[T]): """Empty base class for parse tree node visitors. The T type argument specifies the return type of the visit methods. As all methods defined here return None by default, subclasses do not always need to override all the methods. TODO: make the default return value explicit, then turn on empty body checking in mypy_self_check.ini. """ # Not in superclasses: def visit_mypy_file(self, o: mypy.nodes.MypyFile) -> T: pass # TODO: We have a visit_var method, but no visit_typeinfo or any # other non-Statement SymbolNode (accepting those will raise a # runtime error). Maybe this should be resolved in some direction. def visit_var(self, o: mypy.nodes.Var) -> T: pass # Module structure def visit_import(self, o: mypy.nodes.Import) -> T: pass def visit_import_from(self, o: mypy.nodes.ImportFrom) -> T: pass def visit_import_all(self, o: mypy.nodes.ImportAll) -> T: pass # Definitions def visit_func_def(self, o: mypy.nodes.FuncDef) -> T: pass def visit_overloaded_func_def(self, o: mypy.nodes.OverloadedFuncDef) -> T: pass def visit_class_def(self, o: mypy.nodes.ClassDef) -> T: pass def visit_global_decl(self, o: mypy.nodes.GlobalDecl) -> T: pass def visit_nonlocal_decl(self, o: mypy.nodes.NonlocalDecl) -> T: pass def visit_decorator(self, o: mypy.nodes.Decorator) -> T: pass def visit_type_alias(self, o: mypy.nodes.TypeAlias) -> T: pass def visit_placeholder_node(self, o: mypy.nodes.PlaceholderNode) -> T: pass # Statements def visit_block(self, o: mypy.nodes.Block) -> T: pass def visit_expression_stmt(self, o: mypy.nodes.ExpressionStmt) -> T: pass def visit_assignment_stmt(self, o: mypy.nodes.AssignmentStmt) -> T: pass def visit_operator_assignment_stmt(self, o: mypy.nodes.OperatorAssignmentStmt) -> T: pass def visit_while_stmt(self, o: mypy.nodes.WhileStmt) -> T: pass def visit_for_stmt(self, o: mypy.nodes.ForStmt) -> T: pass def visit_return_stmt(self, o: mypy.nodes.ReturnStmt) -> T: pass def visit_assert_stmt(self, o: mypy.nodes.AssertStmt) -> T: pass def visit_del_stmt(self, o: mypy.nodes.DelStmt) -> T: pass def visit_if_stmt(self, o: mypy.nodes.IfStmt) -> T: pass def visit_break_stmt(self, o: mypy.nodes.BreakStmt) -> T: pass def visit_continue_stmt(self, o: mypy.nodes.ContinueStmt) -> T: pass def visit_pass_stmt(self, o: mypy.nodes.PassStmt) -> T: pass def visit_raise_stmt(self, o: mypy.nodes.RaiseStmt) -> T: pass def visit_try_stmt(self, o: mypy.nodes.TryStmt) -> T: pass def visit_with_stmt(self, o: mypy.nodes.WithStmt) -> T: pass def visit_match_stmt(self, o: mypy.nodes.MatchStmt) -> T: pass def visit_type_alias_stmt(self, o: mypy.nodes.TypeAliasStmt) -> T: pass # Expressions (default no-op implementation) def visit_int_expr(self, o: mypy.nodes.IntExpr) -> T: pass def visit_str_expr(self, o: mypy.nodes.StrExpr) -> T: pass def visit_bytes_expr(self, o: mypy.nodes.BytesExpr) -> T: pass def visit_float_expr(self, o: mypy.nodes.FloatExpr) -> T: pass def visit_complex_expr(self, o: mypy.nodes.ComplexExpr) -> T: pass def visit_ellipsis(self, o: mypy.nodes.EllipsisExpr) -> T: pass def visit_star_expr(self, o: mypy.nodes.StarExpr) -> T: pass def visit_name_expr(self, o: mypy.nodes.NameExpr) -> T: pass def visit_member_expr(self, o: mypy.nodes.MemberExpr) -> T: pass def visit_yield_from_expr(self, o: mypy.nodes.YieldFromExpr) -> T: pass def visit_yield_expr(self, o: 
mypy.nodes.YieldExpr) -> T: pass def visit_call_expr(self, o: mypy.nodes.CallExpr) -> T: pass def visit_op_expr(self, o: mypy.nodes.OpExpr) -> T: pass def visit_comparison_expr(self, o: mypy.nodes.ComparisonExpr) -> T: pass def visit_cast_expr(self, o: mypy.nodes.CastExpr) -> T: pass def visit_assert_type_expr(self, o: mypy.nodes.AssertTypeExpr) -> T: pass def visit_reveal_expr(self, o: mypy.nodes.RevealExpr) -> T: pass def visit_super_expr(self, o: mypy.nodes.SuperExpr) -> T: pass def visit_assignment_expr(self, o: mypy.nodes.AssignmentExpr) -> T: pass def visit_unary_expr(self, o: mypy.nodes.UnaryExpr) -> T: pass def visit_list_expr(self, o: mypy.nodes.ListExpr) -> T: pass def visit_dict_expr(self, o: mypy.nodes.DictExpr) -> T: pass def visit_tuple_expr(self, o: mypy.nodes.TupleExpr) -> T: pass def visit_set_expr(self, o: mypy.nodes.SetExpr) -> T: pass def visit_index_expr(self, o: mypy.nodes.IndexExpr) -> T: pass def visit_type_application(self, o: mypy.nodes.TypeApplication) -> T: pass def visit_lambda_expr(self, o: mypy.nodes.LambdaExpr) -> T: pass def visit_list_comprehension(self, o: mypy.nodes.ListComprehension) -> T: pass def visit_set_comprehension(self, o: mypy.nodes.SetComprehension) -> T: pass def visit_dictionary_comprehension(self, o: mypy.nodes.DictionaryComprehension) -> T: pass def visit_generator_expr(self, o: mypy.nodes.GeneratorExpr) -> T: pass def visit_slice_expr(self, o: mypy.nodes.SliceExpr) -> T: pass def visit_conditional_expr(self, o: mypy.nodes.ConditionalExpr) -> T: pass def visit_type_var_expr(self, o: mypy.nodes.TypeVarExpr) -> T: pass def visit_paramspec_expr(self, o: mypy.nodes.ParamSpecExpr) -> T: pass def visit_type_var_tuple_expr(self, o: mypy.nodes.TypeVarTupleExpr) -> T: pass def visit_type_alias_expr(self, o: mypy.nodes.TypeAliasExpr) -> T: pass def visit_namedtuple_expr(self, o: mypy.nodes.NamedTupleExpr) -> T: pass def visit_enum_call_expr(self, o: mypy.nodes.EnumCallExpr) -> T: pass def visit_typeddict_expr(self, o: mypy.nodes.TypedDictExpr) -> T: pass def visit_newtype_expr(self, o: mypy.nodes.NewTypeExpr) -> T: pass def visit__promote_expr(self, o: mypy.nodes.PromoteExpr) -> T: pass def visit_await_expr(self, o: mypy.nodes.AwaitExpr) -> T: pass def visit_temp_node(self, o: mypy.nodes.TempNode) -> T: pass # Patterns def visit_as_pattern(self, o: mypy.patterns.AsPattern) -> T: pass def visit_or_pattern(self, o: mypy.patterns.OrPattern) -> T: pass def visit_value_pattern(self, o: mypy.patterns.ValuePattern) -> T: pass def visit_singleton_pattern(self, o: mypy.patterns.SingletonPattern) -> T: pass def visit_sequence_pattern(self, o: mypy.patterns.SequencePattern) -> T: pass def visit_starred_pattern(self, o: mypy.patterns.StarredPattern) -> T: pass def visit_mapping_pattern(self, o: mypy.patterns.MappingPattern) -> T: pass def visit_class_pattern(self, o: mypy.patterns.ClassPattern) -> T: pass
algorandfoundation/puya
src/puyapy/_vendor/mypy/visitor.py
Python
NOASSERTION
16,072
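A minimal sketch of building on these visitors, assuming the vendored mypy package is importable. It uses TraverserVisitor from the sibling mypy.traverser module (an assumption here, as that module is not shown), which subclasses NodeVisitor and walks child nodes, to collect function names from a parsed file:

import mypy.nodes
from mypy.traverser import TraverserVisitor


class FuncNameCollector(TraverserVisitor):
    def __init__(self) -> None:
        super().__init__()
        self.names: list[str] = []

    def visit_func_def(self, o: mypy.nodes.FuncDef) -> None:
        self.names.append(o.name)
        super().visit_func_def(o)  # keep walking nested definitions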
algorandfoundation/puya
src/puyapy/awst_build/__init__.py
Python
NOASSERTION
0
import typing
from collections.abc import Iterator, Mapping, Sequence

import attrs
import mypy.nodes
import mypy.types
import mypy.visitor
from puya import log
from puya.errors import InternalError
from puya.parse import SourceLocation
from puya.program_refs import ContractReference

from puyapy.awst_build import constants, pytypes
from puyapy.awst_build.arc4_decorators import get_arc4_abimethod_data
from puyapy.awst_build.base_mypy_visitor import BaseMyPyStatementVisitor
from puyapy.awst_build.context import ASTConversionModuleContext
from puyapy.awst_build.utils import get_decorators_by_fullname
from puyapy.models import (
    ARC4ABIMethodData,
    ContractFragmentBase,
    ContractFragmentMethod,
    ContractFragmentStorage,
)

logger = log.get_logger(__name__)


@attrs.frozen
class _ARC4ClientFragment(ContractFragmentBase):
    id: ContractReference
    _methods: dict[str, ContractFragmentMethod] = attrs.field(factory=dict, init=False)
    mro: Sequence[ContractFragmentBase] = attrs.field(default=(), init=False)
    _symbols: dict[str, pytypes.PyType] = attrs.field(factory=dict, init=False)

    def add_method(self, data: ARC4ABIMethodData, source_location: SourceLocation) -> None:
        method = ContractFragmentMethod(
            member_name=data.member_name,
            metadata=data,
            is_trivial=True,
            source_location=source_location,
        )
        set_result = self._methods.setdefault(method.member_name, method)
        if set_result is method:
            self._symbols[data.member_name] = data.pytype
        else:
            logger.info(
                f"previous definition of {method.member_name} was here",
                location=set_result.source_location,
            )
            logger.error(
                f"redefinition of {method.member_name}",
                location=method.source_location,
            )

    @typing.override
    @property
    def symbols(self) -> Mapping[str, pytypes.PyType | None]:
        return self._symbols

    @typing.override
    def resolve_method(
        self, name: str, *, include_inherited: bool = True
    ) -> ContractFragmentMethod | None:
        return self._methods.get(name)

    @typing.override
    def methods(
        self, *, include_inherited: bool = True, include_overridden: bool = False
    ) -> Iterator[ContractFragmentMethod]:
        yield from self._methods.values()

    @typing.override
    def resolve_storage(
        self, name: str, *, include_inherited: bool = True
    ) -> ContractFragmentStorage | None:
        return None

    @typing.override
    def state(self, *, include_inherited: bool = True) -> Iterator[ContractFragmentStorage]:
        yield from ()


class ARC4ClientASTVisitor(BaseMyPyStatementVisitor[ARC4ABIMethodData | None]):
    @classmethod
    def visit(cls, context: ASTConversionModuleContext, class_def: mypy.nodes.ClassDef) -> None:
        visitor = ARC4ClientASTVisitor(context)
        cref = ContractReference(class_def.info.fullname)
        fragment = _ARC4ClientFragment(id=cref)
        for stmt in class_def.defs.body:
            stmt_loc = context.node_location(stmt)
            with context.log_exceptions(fallback_location=stmt_loc):
                if (abi_method_data := stmt.accept(visitor)) is not None:
                    fragment.add_method(abi_method_data, stmt_loc)
        context.add_contract_fragment(fragment)

    def empty_statement(self, _stmt: mypy.nodes.Statement) -> None:
        return None

    def visit_function(
        self,
        func_def: mypy.nodes.FuncDef,
        decorator: mypy.nodes.Decorator | None,
    ) -> ARC4ABIMethodData | None:
        func_loc = self._location(func_def)
        if decorator is not None:
            dec_by_fullname = get_decorators_by_fullname(self.context, decorator, original=True)
            abimethod_dec = dec_by_fullname.pop(constants.ABIMETHOD_DECORATOR, None)
            for dec_fullname, dec in dec_by_fullname.items():
                logger.error(
                    f'unsupported decorator in ARC4Client: "{dec_fullname}"',
                    location=self._location(dec),
                )
            if abimethod_dec is not None:
                return get_arc4_abimethod_data(self.context, abimethod_dec, func_def)
        logger.error(f"expected an {constants.ABIMETHOD_DECORATOR} decorator", location=func_loc)
        return None

    def visit_block(self, o: mypy.nodes.Block) -> None:
        raise InternalError("shouldn't get here", self._location(o))

    def visit_return_stmt(self, stmt: mypy.nodes.ReturnStmt) -> None:
        self._error("illegal Python syntax, return in class body", location=stmt)

    def visit_class_def(self, cdef: mypy.nodes.ClassDef) -> None:
        self._error("nested classes are not supported", location=cdef)

    def _unsupported_stmt(self, kind: str, stmt: mypy.nodes.Statement) -> None:
        self._error(
            f"{kind} statements are not supported in the class body of an ARC4Client",
            location=stmt,
        )

    def visit_assignment_stmt(self, stmt: mypy.nodes.AssignmentStmt) -> None:
        self._unsupported_stmt("assignment", stmt)

    def visit_operator_assignment_stmt(self, stmt: mypy.nodes.OperatorAssignmentStmt) -> None:
        self._unsupported_stmt("operator assignment", stmt)

    def visit_expression_stmt(self, stmt: mypy.nodes.ExpressionStmt) -> None:
        if isinstance(stmt.expr, mypy.nodes.StrExpr):
            # ignore class docstring, already extracted
            # TODO: should we capture field "docstrings"?
            pass
        else:
            self._unsupported_stmt("expression statement", stmt)

    def visit_if_stmt(self, stmt: mypy.nodes.IfStmt) -> None:
        self._unsupported_stmt("if", stmt)

    def visit_while_stmt(self, stmt: mypy.nodes.WhileStmt) -> None:
        self._unsupported_stmt("while", stmt)

    def visit_for_stmt(self, stmt: mypy.nodes.ForStmt) -> None:
        self._unsupported_stmt("for", stmt)

    def visit_break_stmt(self, stmt: mypy.nodes.BreakStmt) -> None:
        self._unsupported_stmt("break", stmt)

    def visit_continue_stmt(self, stmt: mypy.nodes.ContinueStmt) -> None:
        self._unsupported_stmt("continue", stmt)

    def visit_assert_stmt(self, stmt: mypy.nodes.AssertStmt) -> None:
        self._unsupported_stmt("assert", stmt)

    def visit_del_stmt(self, stmt: mypy.nodes.DelStmt) -> None:
        self._unsupported_stmt("del", stmt)

    def visit_match_stmt(self, stmt: mypy.nodes.MatchStmt) -> None:
        self._unsupported_stmt("match", stmt)

    def visit_type_alias_stmt(self, stmt: mypy.nodes.TypeAliasStmt) -> None:
        self._unsupported_stmt("type", stmt)
algorandfoundation/puya
src/puyapy/awst_build/arc4_client.py
Python
NOASSERTION
6,711
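An illustrative, standalone sketch of the dict.setdefault pattern that add_method uses above to keep the first definition of a member and report later duplicates (all names here are hypothetical):

methods: dict[str, object] = {}

def add_method(name: str, definition: object) -> None:
    existing = methods.setdefault(name, definition)
    if existing is not definition:
        # the first definition wins; the duplicate is only reported
        print(f"redefinition of {name}")

add_method("transfer", object())
add_method("transfer", object())  # prints: redefinition of transfer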
import re
import textwrap
from collections.abc import Iterable, Sequence
from pathlib import Path

from puya import (
    arc56_models as arc56,
    log,
)
from puya.avm import OnCompletionAction
from puya.errors import CodeError
from puya.utils import make_path_relative_to_cwd

from puyapy.awst_build import constants
from puyapy.awst_build.arc4_utils import arc4_to_pytype

logger = log.get_logger(__name__)

_AUTO_GENERATED_COMMENT = "# This file is auto-generated, do not modify"
_INDENT = " " * 4
_NON_ALPHA_NUMERIC = re.compile(r"\W+")


def write_arc4_client(contract: arc56.Contract, out_dir: Path) -> None:
    stub_path = out_dir / f"client_{contract.name}.py"
    if _can_overwrite_auto_generated_file(stub_path):
        logger.info(f"writing {make_path_relative_to_cwd(stub_path)}")
        stub_text = _ClientGenerator.generate(contract)
        stub_path.write_text(stub_text)
    else:
        logger.error(
            f"Not outputting {make_path_relative_to_cwd(stub_path)} "
            "since content does not appear to be auto-generated"
        )


def _can_overwrite_auto_generated_file(path: Path) -> bool:
    return not path.exists() or path.read_text().startswith(_AUTO_GENERATED_COMMENT)


class _ClientGenerator:
    def __init__(self, contract: arc56.Contract):
        self.contract = contract
        self.python_methods = set[str]()
        self.struct_to_class = dict[str, str]()
        self.reserved_class_names = set[str]()
        self.reserved_method_names = set[str]()
        self.class_decls = list[str]()

    @classmethod
    def generate(cls, contract: arc56.Contract) -> str:
        return cls(contract)._gen()  # noqa: SLF001

    def _gen(self) -> str:
        # generate class definitions for any referenced structs in methods
        # don't generate from self.contract.structs as it may contain other struct definitions
        client_class = self._unique_class(self.contract.name)
        for method in self.contract.methods:
            for struct in filter(None, (method.returns.struct, *(a.struct for a in method.args))):
                if struct not in self.struct_to_class and (
                    struct_def := self.contract.structs.get(struct)
                ):
                    self._prepare_struct_class(struct, struct_def)
        return "\n".join(
            (
                _AUTO_GENERATED_COMMENT,
                "# flake8: noqa",  # this works for flake8 and ruff
                "# fmt: off",  # disable formatting
                "import typing",
                "",
                "import algopy",
                "",
                *self.class_decls,
                "",
                f"class {client_class}(algopy.arc4.ARC4Client, typing.Protocol):",
                *_docstring(self.contract.desc),
                *self._gen_methods(),
            )
        )

    def _prepare_struct_class(self, name: str, fields: Sequence[arc56.StructField]) -> str:
        python_name = self._unique_class(name)
        self.struct_to_class[name] = python_name
        lines = [f"class {python_name}(algopy.arc4.Struct):"]
        for field in fields:
            if isinstance(field.type, str):
                typ = self._get_client_type(field.type)
            else:
                # generate anonymous struct type
                anon_struct = f"{name}_{field.name}"
                typ = self._prepare_struct_class(anon_struct, field.type)
            lines.append(_indent(f"{field.name}: {typ}"))
        if self.class_decls:
            self.class_decls.append("")
        self.class_decls.extend(lines)
        return python_name

    def _get_client_type(self, typ: str) -> str:
        # map ABI / AVM type to algopy type
        if typ == arc56.AVMType.uint64:
            return "algopy.UInt64"
        elif typ == arc56.AVMType.bytes:
            return "algopy.Bytes"
        elif struct := self.contract.structs.get(typ):
            try:
                # use existing definition
                return self.struct_to_class[typ]
            except KeyError:
                # generate and return class name
                return self._prepare_struct_class(typ, struct)
        else:
            return str(arc4_to_pytype(typ, None))

    def _unique_class(self, name: str) -> str:
        base_name = name = _get_python_safe_name(name)
        seq = 1
        while name in self.reserved_class_names:
            seq += 1
            name = f"{base_name}{seq}"
        self.reserved_class_names.add(name)
        return name

    def _unique_method(self, name: str) -> str:
        base_name = name = _get_python_safe_name(name)
        seq = 1
        while name in self.reserved_method_names:
            seq += 1
            name = f"{base_name}{seq}"
        self.reserved_method_names.add(name)
        return name

    def _gen_methods(self) -> Iterable[str]:
        if not self.contract.methods:
            yield _indent("pass")
            yield ""
        else:
            for method in self.contract.methods:
                yield self._gen_method(method)

    def _gen_method(self, method: arc56.Method) -> str:
        return_type = self._get_client_type(method.returns.struct or method.returns.type)
        python_method = self._unique_method(method.name)
        return _indent(
            (
                _arc4_method_to_decorator(python_method, method),
                f"def {python_method}(",
                _indent(
                    (
                        "self,",
                        *(self._gen_arg(arg) for arg in method.args),
                    )
                ),
                f") -> {return_type}:" + ("" if method.desc else " ..."),
                *_docstring(method.desc),
                "",
            )
        )

    def _gen_arg(self, arg: arc56.MethodArg) -> str:
        python_type = self._get_client_type(arg.struct or arg.type)
        return f"{arg.name}: {python_type},"


def _docstring(desc: str | None) -> list[str]:
    if desc is None:
        return []
    return _indent(
        [
            '"""',
            *desc.splitlines(),
            '"""',
        ]
    ).splitlines()


def _arc4_method_to_decorator(python_method: str, method: arc56.Method) -> str:
    abimethod_args = dict[str, object]()
    if method.name != python_method:
        abimethod_args["name"] = method.name
    if method.readonly:
        abimethod_args["readonly"] = True
    if not _compatible_actions(method.actions.create, method.actions.call):
        # TODO: support this, once decorators support it
        raise CodeError(
            f"unsupported on completion combination for generating an ARC4 client"
            f" for method: {method.name}"
        )
    actions = sorted(
        {*method.actions.create, *method.actions.call}, key=lambda a: OnCompletionAction[a]
    )
    if set(actions) != {OnCompletionAction.NoOp.name}:
        abimethod_args["allow_actions"] = actions
    if method.actions.create and method.actions.call:
        abimethod_args["create"] = "allow"
    elif method.actions.create:
        abimethod_args["create"] = "require"
    else:
        # disallow is default
        pass
    kwargs = ", ".join(f"{name}={value!r}" for name, value in abimethod_args.items())
    decorator = f"@{constants.ABIMETHOD_DECORATOR_ALIAS}"
    if kwargs:
        decorator += f"({kwargs})"
    return decorator


def _compatible_actions(create: Sequence[str], call: Sequence[str]) -> bool:
    if not create:
        return True
    if not call:
        return True
    # if both collections are present, then they are compatible if everything in
    # create is also in call
    return all(a in call for a in create)


def _indent(lines: Iterable[str] | str) -> str:
    if not isinstance(lines, str):
        lines = "\n".join(lines)
    return textwrap.indent(lines, _INDENT)


def _get_python_safe_name(name: str) -> str:
    return _NON_ALPHA_NUMERIC.sub("_", name)
algorandfoundation/puya
src/puyapy/awst_build/arc4_client_gen.py
Python
NOASSERTION
7,941
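A standalone sketch of the sanitise-then-suffix naming scheme implemented by _get_python_safe_name and _unique_class/_unique_method above (unique_name is an illustrative stand-in combining the two steps):

import re

_NON_ALPHA_NUMERIC = re.compile(r"\W+")

def unique_name(name: str, reserved: set[str]) -> str:
    # non-alphanumeric runs collapse to "_"; clashes get a numeric suffix
    base = name = _NON_ALPHA_NUMERIC.sub("_", name)
    seq = 1
    while name in reserved:
        seq += 1
        name = f"{base}{seq}"
    reserved.add(name)
    return name

taken: set[str] = set()
assert unique_name("my-client", taken) == "my_client"
assert unique_name("my client", taken) == "my_client2"  # clash gets a suffix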
import typing

import mypy.nodes
import mypy.visitor
from immutabledict import immutabledict
from puya import log
from puya.avm import OnCompletionAction
from puya.awst.nodes import (
    ABIMethodArgConstantDefault,
    ABIMethodArgDefault,
    ABIMethodArgMemberDefault,
    ARC4ABIMethodConfig,
    ARC4BareMethodConfig,
    ARC4CreateOption,
    Expression,
)
from puya.errors import CodeError, InternalError
from puya.parse import SourceLocation

from puyapy.awst_build import pytypes
from puyapy.awst_build.context import ASTConversionModuleContext
from puyapy.awst_build.eb.interface import NodeBuilder
from puyapy.awst_build.subroutine import ExpressionASTConverter, require_instance_builder
from puyapy.awst_build.utils import extract_decorator_args, get_unaliased_fullname
from puyapy.models import ARC4ABIMethodData, ARC4BareMethodData

logger = log.get_logger(__name__)


def get_arc4_baremethod_data(
    context: ASTConversionModuleContext,
    decorator: mypy.nodes.Expression,
    func_def: mypy.nodes.FuncDef,
) -> ARC4BareMethodData:
    dec_loc = context.node_location(decorator, func_def.info)
    pytype, func_types = _get_func_types(context, func_def, dec_loc)
    if func_types != {"output": pytypes.NoneType}:
        logger.error("bare methods should have no arguments or return values", location=dec_loc)
    named_args = _extract_decorator_named_args(context, decorator, dec_loc)
    evaluated_args = {n: _parse_decorator_arg(context, n, a) for n, a in named_args.items()}
    create = _extract_create_option(evaluated_args, dec_loc)
    allowed_completion_types = _extract_allow_actions_option(evaluated_args, dec_loc)
    if evaluated_args:
        logger.error(
            f"unexpected parameters: {', '.join(map(str, evaluated_args))}", location=dec_loc
        )
    return ARC4BareMethodData(
        member_name=func_def.name,
        pytype=pytype,
        config=ARC4BareMethodConfig(
            allowed_completion_types=allowed_completion_types,
            create=create,
            source_location=dec_loc,
        ),
        source_location=dec_loc,
    )


_READONLY = "readonly"
_CLIENT_DEFAULTS = "default_args"
_ALLOWED_ACTIONS = "allow_actions"
_CREATE_OPTIONS = "create"
_NAME_OVERRIDE = "name"


def get_arc4_abimethod_data(
    context: ASTConversionModuleContext,
    decorator: mypy.nodes.Expression,
    func_def: mypy.nodes.FuncDef,
) -> ARC4ABIMethodData:
    dec_loc = context.node_location(decorator, func_def.info)
    pytype, func_types = _get_func_types(context, func_def, dec_loc)

    named_args = _extract_decorator_named_args(context, decorator, dec_loc)
    evaluated_args = {n: _parse_decorator_arg(context, n, a) for n, a in named_args.items()}

    create = _extract_create_option(evaluated_args, dec_loc)
    allowed_completion_types = _extract_allow_actions_option(evaluated_args, dec_loc)

    # map "name" param
    name = func_def.name
    match evaluated_args.pop(_NAME_OVERRIDE, None):
        case None:
            pass
        case str(name):
            pass
        case invalid_name:
            context.error(f"invalid name option: {invalid_name}", dec_loc)

    # map "readonly" param
    default_readonly = False
    match evaluated_args.pop(_READONLY, default_readonly):
        case bool(readonly):
            pass
        case invalid_readonly_option:
            context.error(f"invalid readonly option: {invalid_readonly_option}", dec_loc)
            readonly = default_readonly

    # map "default_args" param
    default_args = dict[str, ABIMethodArgDefault]()
    match evaluated_args.pop(_CLIENT_DEFAULTS, {}):
        case {**options}:
            method_arg_names = func_types.keys() - {"output"}
            for parameter, value in options.items():
                if parameter not in method_arg_names:
                    context.error(
                        f"{parameter!r} is not a parameter of {func_def.fullname}",
                        dec_loc,
                    )
                else:
                    # if it's in method_arg_names, it's a str
                    assert isinstance(parameter, str)
                    if isinstance(value, str):
                        default_args[parameter] = ABIMethodArgMemberDefault(name=value)
                    elif isinstance(value, Expression):
                        default_args[parameter] = ABIMethodArgConstantDefault(value=value)
                    else:
                        context.error(f"invalid default_args value: {value!r}", dec_loc)
        case invalid_default_args_option:
            context.error(f"invalid default_args option: {invalid_default_args_option}", dec_loc)

    config = ARC4ABIMethodConfig(
        source_location=dec_loc,
        allowed_completion_types=allowed_completion_types,
        create=create,
        name=name,
        readonly=readonly,
        default_args=immutabledict(default_args),
    )
    return ARC4ABIMethodData(
        member_name=func_def.name,
        pytype=pytype,
        config=config,
        signature=func_types,
        source_location=dec_loc,
    )


def _get_func_types(
    context: ASTConversionModuleContext, func_def: mypy.nodes.FuncDef, location: SourceLocation
) -> tuple[pytypes.FuncType, dict[str, pytypes.PyType]]:
    if func_def.type is None:
        raise CodeError("typing error", location)
    func_type = context.type_to_pytype(
        func_def.type, source_location=context.node_location(func_def, module_src=func_def.info)
    )
    if not isinstance(func_type, pytypes.FuncType):
        raise InternalError(
            f"unexpected type result for ABI function definition type: {type(func_type).__name__}",
            location,
        )

    def require_arg_name(arg: pytypes.FuncArg) -> str:
        if arg.name is None:
            raise CodeError(
                "positional only arguments are not supported with ARC-4 methods", location
            )
        return arg.name

    result = {require_arg_name(arg): arg.type for arg in func_type.args}
    if "output" in result:
        # https://github.com/algorandfoundation/ARCs/blob/main/assets/arc-0032/application.schema.json
        raise CodeError(
            "for compatibility with ARC-32, ARC-4 methods cannot have an argument named output",
            location,
        )
    result["output"] = func_type.ret_type
    return func_type, result


def _extract_decorator_named_args(
    context: ASTConversionModuleContext, decorator: mypy.nodes.Expression, location: SourceLocation
) -> dict[str, mypy.nodes.Expression]:
    result = {}
    for name, value in extract_decorator_args(decorator, location):
        if name is None:
            logger.error("unexpected positional argument", location=context.node_location(value))
        elif name in result:
            logger.error("duplicate named argument", location=context.node_location(value))
        else:
            result[name] = value
    return result


def _parse_decorator_arg(
    context: ASTConversionModuleContext, name: str, value: mypy.nodes.Expression
) -> object:
    visitor = _ARC4DecoratorArgEvaluator(context, name)
    return value.accept(visitor)


class _ARC4DecoratorArgEvaluator(mypy.visitor.NodeVisitor[object]):
    def __init__(self, context: ASTConversionModuleContext, arg_name: str):
        self.context = context
        self.arg_name = arg_name

    def __getattribute__(self, name: str) -> object:
        attr = super().__getattribute__(name)
        if name.startswith("visit_") and not attr.__module__.startswith("puyapy."):
            return self._not_supported
        return attr

    def _not_supported(self, o: mypy.nodes.Context) -> typing.Never:
        raise CodeError("unexpected argument type", self.context.node_location(o))

    def _resolve_constant_reference(self, expr: mypy.nodes.RefExpr) -> object:
        try:
            return self.context.constants[expr.fullname]
        except KeyError:
            raise CodeError(
                f"Unresolved module constant: {expr.fullname}", self.context.node_location(expr)
            ) from None

    @typing.override
    def visit_call_expr(self, o: mypy.nodes.CallExpr) -> Expression:
        return _parse_expression(self.context, o)

    @typing.override
    def visit_str_expr(self, o: mypy.nodes.StrExpr) -> str:
        return o.value

    @typing.override
    def visit_name_expr(self, o: mypy.nodes.NameExpr) -> object:
        if self.arg_name == _READONLY:
            if o.fullname == "builtins.True":
                return True
            if o.fullname == "builtins.False":
                return False
        elif self.arg_name == _CLIENT_DEFAULTS:
            if isinstance(o.node, mypy.nodes.Decorator):
                return o.name  # assume abimethod
        return self._resolve_constant_reference(o)

    @typing.override
    def visit_member_expr(self, o: mypy.nodes.MemberExpr) -> object:
        if self.arg_name == _ALLOWED_ACTIONS and isinstance(o.expr, mypy.nodes.RefExpr):
            unaliased_base_fullname = get_unaliased_fullname(o.expr)
            if unaliased_base_fullname == pytypes.OnCompleteActionType.name:
                try:
                    OnCompletionAction[o.name]
                except KeyError:
                    raise CodeError(
                        f"unable to resolve constant value for {unaliased_base_fullname}.{o.name}",
                        self.context.node_location(o),
                    ) from None
                else:
                    return o.name
        return self._resolve_constant_reference(o)

    @typing.override
    def visit_unary_expr(self, o: mypy.nodes.UnaryExpr) -> object:
        if self.arg_name == _READONLY:
            operand = o.expr.accept(self)
            if o.op == "not":
                return not operand
        elif self.arg_name == _CLIENT_DEFAULTS:
            return _parse_expression(self.context, o)
        self._not_supported(o)

    @typing.override
    def visit_list_expr(self, o: mypy.nodes.ListExpr) -> object:
        if self.arg_name == _ALLOWED_ACTIONS:
            return [item.accept(self) for item in o.items]
        self._not_supported(o)

    @typing.override
    def visit_tuple_expr(self, o: mypy.nodes.TupleExpr) -> object:
        if self.arg_name == _ALLOWED_ACTIONS:
            return tuple(item.accept(self) for item in o.items)
        elif self.arg_name == _CLIENT_DEFAULTS:
            return _parse_expression(self.context, o)
        self._not_supported(o)

    @typing.override
    def visit_dict_expr(self, o: mypy.nodes.DictExpr) -> dict[object, object]:
        return {key.accept(self) if key else None: value.accept(self) for key, value in o.items}


def _parse_expression(
    context: ASTConversionModuleContext, node: mypy.nodes.Expression
) -> Expression:
    converter = _ConstantExpressionASTConverter(context)
    node_builder = node.accept(converter)
    instance_builder = require_instance_builder(node_builder)
    return instance_builder.resolve()


class _ConstantExpressionASTConverter(ExpressionASTConverter):
    @typing.override
    def resolve_local_type(self, var_name: str, expr_loc: SourceLocation) -> pytypes.PyType | None:
        raise CodeError("local variables not supported in decorators", expr_loc)

    @typing.override
    def builder_for_self(self, expr_loc: SourceLocation) -> NodeBuilder:
        raise InternalError("self variable outside of method", expr_loc)

    @typing.override
    def visit_super_expr(self, o: mypy.nodes.SuperExpr) -> typing.Never:
        raise CodeError("super expressions not supported in decorators", self._location(o))

    @typing.override
    def visit_assignment_expr(self, o: mypy.nodes.AssignmentExpr) -> typing.Never:
        raise CodeError("assignment expressions not supported in decorators", self._location(o))


def _extract_create_option(
    evaluated_args: dict[str, object], location: SourceLocation
) -> ARC4CreateOption:
    default_value = ARC4CreateOption.disallow
    option_name = evaluated_args.pop(_CREATE_OPTIONS, default_value.name)
    try:
        return ARC4CreateOption[option_name]  # type: ignore[misc]
    except KeyError:
        logger.error(  # noqa: TRY400
            f"invalid create option value: {option_name}", location=location
        )
        return default_value


def _extract_allow_actions_option(
    evaluated_args: dict[str, object], location: SourceLocation
) -> list[OnCompletionAction]:
    allowed_completion_types = []
    match evaluated_args.pop(_ALLOWED_ACTIONS, None):
        case None:
            pass
        case []:
            logger.error("empty allow_actions", location=location)
        case [*allow_actions]:
            for a in allow_actions:
                oca = _allowed_oca(a)
                if oca is None:
                    logger.error(f"invalid allow action: {a}", location=location)
                elif oca in allowed_completion_types:
                    logger.error(f"duplicate value in allow_actions: {a}", location=location)
                else:
                    allowed_completion_types.append(oca)
        case invalid_allow_actions_option:
            logger.error(
                f"invalid allow_actions option: {invalid_allow_actions_option}", location=location
            )
    # defaults set last in case of one or more errors above
    return allowed_completion_types or [OnCompletionAction.NoOp]


def _allowed_oca(name: object) -> OnCompletionAction | None:
    if not isinstance(name, str):
        return None
    try:
        result = OnCompletionAction[name]
    except KeyError:
        return None
    if result is OnCompletionAction.ClearState:
        return None
    return result
algorandfoundation/puya
src/puyapy/awst_build/arc4_decorators.py
Python
NOASSERTION
13,698
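The named arguments evaluated above (_NAME_OVERRIDE, _READONLY, _ALLOWED_ACTIONS, _CREATE_OPTIONS) correspond directly to keyword arguments of the public algopy decorators. A minimal sketch of the decorator surface this module parses, assuming the public algopy stubs; the contract itself is hypothetical and not from this repo:

# Hypothetical contract illustrating the decorator options parsed by
# get_arc4_abimethod_data / get_arc4_baremethod_data above.
from algopy import ARC4Contract, UInt64, arc4


class Example(ARC4Contract):
    @arc4.baremethod(create="require")  # parsed via _CREATE_OPTIONS
    def create(self) -> None:  # bare method: no arguments, no return value
        pass

    @arc4.abimethod(
        name="add",  # parsed via _NAME_OVERRIDE
        readonly=True,  # parsed via _READONLY
        allow_actions=["NoOp"],  # parsed via _ALLOWED_ACTIONS
    )
    def add_impl(self, a: UInt64, b: UInt64) -> UInt64:
        return a + b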
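_ARC4DecoratorArgEvaluator above funnels every visit_* method it did not itself override into a single error handler by intercepting attribute lookup with __getattribute__. A self-contained sketch of that trick, using an explicit allowlist in place of the original's module-prefix check (the class names here are illustrative only):

class Base:
    def visit_int(self, node: object) -> object:
        return node

    def visit_str(self, node: object) -> object:
        return node


class Strict(Base):
    # only these inherited visitor methods are considered supported
    _supported = frozenset({"visit_str"})

    def __getattribute__(self, name: str) -> object:
        attr = super().__getattribute__(name)
        if name.startswith("visit_") and name not in Strict._supported:
            # reroute unsupported visit_* lookups to the error handler
            return super().__getattribute__("_not_supported")
        return attr

    def _not_supported(self, node: object) -> object:
        raise ValueError(f"unexpected argument type: {node!r}")


s = Strict()
assert s.visit_str("ok") == "ok"  # allowlisted, dispatches normally
try:
    s.visit_int(1)  # inherited but not allowlisted, rerouted
except ValueError:
    pass
else:
    raise AssertionError("expected rejection")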
import re
import typing
from collections.abc import Callable, Iterable

import attrs
import mypy.nodes
import mypy.visitor

from puya import log
from puya.avm import TransactionType
from puya.awst import wtypes
from puya.errors import CodeError, InternalError
from puya.parse import SourceLocation
from puyapy.awst_build import pytypes

__all__ = [
    "arc4_to_pytype",
    "pytype_to_arc4",
    "pytype_to_arc4_pytype",
]

logger = log.get_logger(__name__)


def _is_arc4_struct(typ: pytypes.PyType) -> typing.TypeGuard[pytypes.StructType]:
    if not (pytypes.ARC4StructBaseType < typ):
        return False
    if not isinstance(typ, pytypes.StructType):
        raise InternalError(
            f"Type inherits from {pytypes.ARC4StructBaseType!r}"
            f" but structure type is {type(typ).__name__!r}"
        )
    return True


@attrs.frozen
class _DecoratorData:
    fullname: str
    args: list[tuple[str | None, mypy.nodes.Expression]]
    source_location: SourceLocation


def pytype_to_arc4_pytype(
    pytype: pytypes.PyType,
    on_error: Callable[[pytypes.PyType], pytypes.PyType],
) -> pytypes.PyType:
    match pytype:
        case pytypes.BoolType:
            return pytypes.ARC4BoolType
        case pytypes.NamedTupleType():
            return pytypes.StructType(
                base=pytypes.ARC4StructBaseType,
                desc=pytype.desc,
                name=pytype.name,
                fields={
                    name: pytype_to_arc4_pytype(t, on_error) for name, t in pytype.fields.items()
                },
                frozen=True,
                source_location=pytype.source_location,
            )
        case pytypes.TupleType():
            return pytypes.GenericARC4TupleType.parameterise(
                [pytype_to_arc4_pytype(t, on_error) for t in pytype.items], pytype.source_location
            )
        case pytypes.NoneType | pytypes.GroupTransactionType():
            return pytype
    if pytypes.UInt64Type <= pytype:
        return pytypes.ARC4UIntN_Aliases[64]
    elif pytypes.BigUIntType <= pytype:
        return pytypes.ARC4UIntN_Aliases[512]
    elif pytypes.BytesType <= pytype:
        return pytypes.ARC4DynamicBytesType
    elif pytypes.StringType <= pytype:
        return pytypes.ARC4StringType
    elif pytype.is_type_or_subtype(
        pytypes.ApplicationType, pytypes.AssetType, pytypes.AccountType
    ) or isinstance(pytype.wtype, wtypes.ARC4Type):
        return pytype
    else:
        return on_error(pytype)


_UINT_REGEX = re.compile(r"^uint(?P<n>[0-9]+)$")
_UFIXED_REGEX = re.compile(r"^ufixed(?P<n>[0-9]+)x(?P<m>[0-9]+)$")
_FIXED_ARRAY_REGEX = re.compile(r"^(?P<type>.+)\[(?P<size>[0-9]+)]$")
_DYNAMIC_ARRAY_REGEX = re.compile(r"^(?P<type>.+)\[]$")
_TUPLE_REGEX = re.compile(r"^\((?P<types>.+)\)$")
_ARC4_PYTYPE_MAPPING = {
    "bool": pytypes.ARC4BoolType,
    "string": pytypes.ARC4StringType,
    "account": pytypes.AccountType,
    "application": pytypes.ApplicationType,
    "asset": pytypes.AssetType,
    "void": pytypes.NoneType,
    "txn": pytypes.GroupTransactionTypes[None],
    **{t.name: pytypes.GroupTransactionTypes[t] for t in TransactionType},
    "address": pytypes.ARC4AddressType,
    "byte": pytypes.ARC4ByteType,
    "byte[]": pytypes.ARC4DynamicBytesType,
}


def arc4_to_pytype(typ: str, location: SourceLocation | None = None) -> pytypes.PyType:
    if known_typ := _ARC4_PYTYPE_MAPPING.get(typ):
        return known_typ
    if uint := _UINT_REGEX.match(typ):
        n = int(uint.group("n"))
        n_typ = pytypes.TypingLiteralType(value=n, source_location=None)
        if n <= 64:
            return pytypes.GenericARC4UIntNType.parameterise([n_typ], location)
        else:
            return pytypes.GenericARC4BigUIntNType.parameterise([n_typ], location)
    if ufixed := _UFIXED_REGEX.match(typ):
        n, m = map(int, ufixed.group("n", "m"))
        n_typ = pytypes.TypingLiteralType(value=n, source_location=None)
        m_typ = pytypes.TypingLiteralType(value=m, source_location=None)
        if n <= 64:
            return pytypes.GenericARC4UFixedNxMType.parameterise([n_typ, m_typ], location)
        else:
            return pytypes.GenericARC4BigUFixedNxMType.parameterise([n_typ, m_typ], location)
    if fixed_array := _FIXED_ARRAY_REGEX.match(typ):
        arr_type, size_str = fixed_array.group("type", "size")
        size = int(size_str)
        size_typ = pytypes.TypingLiteralType(value=size, source_location=None)
        element_type = arc4_to_pytype(arr_type, location)
        return pytypes.GenericARC4StaticArrayType.parameterise([element_type, size_typ], location)
    if dynamic_array := _DYNAMIC_ARRAY_REGEX.match(typ):
        arr_type = dynamic_array.group("type")
        element_type = arc4_to_pytype(arr_type, location)
        return pytypes.GenericARC4DynamicArrayType.parameterise([element_type], location)
    if tuple_match := _TUPLE_REGEX.match(typ):
        tuple_types = [
            arc4_to_pytype(x, location) for x in split_tuple_types(tuple_match.group("types"))
        ]
        return pytypes.GenericARC4TupleType.parameterise(tuple_types, location)
    raise CodeError(f"unknown ARC4 type '{typ}'", location)


def pytype_to_arc4(typ: pytypes.PyType, loc: SourceLocation | None = None) -> str:
    def on_error(bad_type: pytypes.PyType) -> typing.Never:
        raise CodeError(
            f"not an ARC4 type or native equivalent: {bad_type}",
            loc or getattr(bad_type, "source_location", None),
        )

    arc4_pytype = pytype_to_arc4_pytype(typ, on_error)
    match arc4_pytype:
        case pytypes.NoneType:
            return "void"
        case pytypes.AssetType:
            return "asset"
        case pytypes.AccountType:
            return "account"
        case pytypes.ApplicationType:
            return "application"
        case pytypes.TransactionRelatedType(transaction_type=transaction_type):
            return transaction_type.name if transaction_type else "txn"
    wtype = arc4_pytype.wtype
    if not isinstance(wtype, wtypes.ARC4Type):
        raise CodeError(f"not an ARC4 type or native equivalent: {wtype}", loc)
    return wtype.arc4_name


def split_tuple_types(types: str) -> Iterable[str]:
    """Splits inner tuple types into individual elements.

    e.g. "uint64,(uint8,string),bool" becomes ["uint64", "(uint8,string)", "bool"]
    """
    tuple_level = 0
    last_idx = 0
    for idx, tok in enumerate(types):
        if tok == "(":
            tuple_level += 1
        elif tok == ")":
            tuple_level -= 1
        if tok == "," and tuple_level == 0:
            yield types[last_idx:idx]
            last_idx = idx + 1
    yield types[last_idx:]
algorandfoundation/puya
src/puyapy/awst_build/arc4_utils.py
Python
NOASSERTION
6,689
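The depth-counting split in split_tuple_types above is easy to exercise in isolation. A self-contained copy of the same algorithm, runnable without puya installed, with the docstring's own example as a check:

from collections.abc import Iterator


def split_tuple_types(types: str) -> Iterator[str]:
    depth = 0  # current parenthesis nesting level
    last_idx = 0  # start index of the current top-level element
    for idx, tok in enumerate(types):
        if tok == "(":
            depth += 1
        elif tok == ")":
            depth -= 1
        if tok == "," and depth == 0:  # only split commas at the top level
            yield types[last_idx:idx]
            last_idx = idx + 1
    yield types[last_idx:]


assert list(split_tuple_types("uint64,(uint8,string),bool")) == [
    "uint64",
    "(uint8,string)",
    "bool",
]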
import abc import typing import mypy.checker import mypy.nodes import mypy.visitor from puya.errors import CodeError, InternalError from puya.parse import SourceLocation from puyapy.awst_build.context import ASTConversionModuleContext from puyapy.awst_build.exceptions import UnsupportedASTError from puyapy.awst_build.utils import refers_to_fullname class _BaseMyPyVisitor: def __init__(self, context: ASTConversionModuleContext): self.context = context def _location(self, node: mypy.nodes.Context) -> SourceLocation: return self.context.node_location(node) def _error(self, msg: str, location: mypy.nodes.Context | SourceLocation) -> None: self.context.error(msg, location) def _precondition( self, condition: bool, # noqa: FBT001 /, msg: str, location: mypy.nodes.Context | SourceLocation, ) -> None: if not condition: raise InternalError( msg, self._location(location) if isinstance(location, mypy.nodes.Context) else location, ) def _unsupported_node(self, node: mypy.nodes.Context, details: str) -> typing.Never: raise UnsupportedASTError(node, self._location(node), details=details) class BaseMyPyStatementVisitor[_TStatement]( _BaseMyPyVisitor, mypy.visitor.StatementVisitor[_TStatement], abc.ABC, ): # ~~~ things we can just ignore ~~~ # @typing.override def visit_pass_stmt(self, o: mypy.nodes.PassStmt) -> _TStatement: return self.empty_statement(o) @typing.override def visit_import(self, o: mypy.nodes.Import) -> _TStatement: return self.empty_statement(o) @typing.override def visit_import_from(self, o: mypy.nodes.ImportFrom) -> _TStatement: return self.empty_statement(o) @typing.override def visit_import_all(self, o: mypy.nodes.ImportAll) -> _TStatement: return self.empty_statement(o) @abc.abstractmethod def empty_statement(self, stmt: mypy.nodes.Statement) -> _TStatement: ... 
# ~~~ simplify function (decorated, overloaded, normal) visitation ~~~ # def check_fatal_decorators(self, exprs: list[mypy.nodes.Expression]) -> None: for dec_expr in exprs: if isinstance(dec_expr, mypy.nodes.CallExpr): dec_expr = dec_expr.callee if isinstance(dec_expr, mypy.nodes.RefExpr): if refers_to_fullname( dec_expr, "typing.no_type_check", "typing_extensions.no_type_check" ): raise CodeError( "no_type_check is not supported -" " type checking is required for compilation", self._location(dec_expr), ) else: self.context.warning( "Unable to determine full name of expression", self._location(dec_expr) ) @typing.final @typing.override def visit_func_def(self, fdef: mypy.nodes.FuncDef) -> _TStatement: self._precondition( not fdef.is_decorated, "Decorated functions should have been visited via visit_decorator", fdef, ) self._precondition(not fdef.is_property, "Property function that is not decorated??", fdef) return self._do_function(fdef, decorator=None) @typing.final @typing.override def visit_decorator(self, dec: mypy.nodes.Decorator) -> _TStatement: self.check_fatal_decorators(dec.decorators) if mypy.checker.is_property(dec): self._unsupported_node(dec, "property decorator/descriptor not supported currently") return self._do_function(dec.func, dec) @typing.final @typing.override def visit_overloaded_func_def(self, o: mypy.nodes.OverloadedFuncDef) -> _TStatement: # This could either be a @typing.overload, in which case o.impl will contain # the actual function, or it could be a @property with a setter and/or a deleter, # in which case o.impl will be None if mypy.checker.is_property(o): self._unsupported_node(o, "property decorator/descriptor not supported currently") if o.impl: self.context.warning( "@typing.overload() should not be required, " "and may not function exactly as intended", o, ) return o.impl.accept(self) else: # typing.overload sequences should always have an implementation, # unless they're in a stub file - but we don't process those, # so we shouldn't get here raise CodeError( "An overloaded function outside a stub file must have an implementation", self._location(o), ) def _do_function( self, fdef: mypy.nodes.FuncDef, decorator: mypy.nodes.Decorator | None, ) -> _TStatement: self._precondition( not fdef.is_mypy_only, "function is defined in TYPE_CHECKING block", fdef ) # we shouldn't get here if fdef.is_generator: self._unsupported_node(fdef, "generator functions are not supported") if fdef.is_coroutine or fdef.is_awaitable_coroutine or fdef.is_async_generator: self._unsupported_node(fdef, "async functions are not supported") if fdef.dataclass_transform_spec is not None: self._unsupported_node(fdef, "data class transforms (PEP-681) are not supported ") return self.visit_function(fdef, decorator) @abc.abstractmethod def visit_function( self, fdef: mypy.nodes.FuncDef, decorator: mypy.nodes.Decorator | None, ) -> _TStatement: ... # ~~~ unsupported scope modifiers ~~~ # @typing.override def visit_global_decl(self, stmt: mypy.nodes.GlobalDecl) -> typing.Never: self._unsupported_node(stmt, "global variables must be immutable") # TODO: do we reject nonlocal here too? are nested functions in/out? 
@typing.override def visit_nonlocal_decl(self, o: mypy.nodes.NonlocalDecl) -> typing.Never: self._unsupported_node(o, "nested functions are not supported") # ~~~ raising and handling exceptions unsupported ~~~ # @typing.override def visit_raise_stmt(self, stmt: mypy.nodes.RaiseStmt) -> typing.Never: self._unsupported_node(stmt, "exception raising and exception handling not supported") @typing.override def visit_try_stmt(self, stmt: mypy.nodes.TryStmt) -> typing.Never: self._unsupported_node(stmt, "exception raising and exception handling not supported") @typing.override def visit_with_stmt(self, stmt: mypy.nodes.WithStmt) -> typing.Never: self._unsupported_node( stmt, "context managers are redundant due to a lack of exception support", ) class BaseMyPyExpressionVisitor[_TExpression]( _BaseMyPyVisitor, mypy.visitor.ExpressionVisitor[_TExpression], abc.ABC, ): # ~~~ things that we support but shouldn't encounter via visitation ~~~ # @typing.override def visit_star_expr(self, expr: mypy.nodes.StarExpr) -> _TExpression: # star expression examples (non-exhaustive): # head, *tail = my_list # my_func(first, *rest) # [prepend, *existing] raise InternalError( "star expressions should be handled at a higher level", self._location(expr) ) @typing.override def visit_dictionary_comprehension( self, expr: mypy.nodes.DictionaryComprehension ) -> typing.Never: self._unsupported_node(expr, "dictionaries are not supported") @typing.override def visit_set_expr(self, expr: mypy.nodes.SetExpr) -> typing.Never: self._unsupported_node(expr, "sets are not supported") @typing.override def visit_set_comprehension(self, expr: mypy.nodes.SetComprehension) -> typing.Never: self._unsupported_node(expr, "sets are not supported") # ~~~ math we don't support (yet?) ~~~ # @typing.override def visit_float_expr(self, expr: mypy.nodes.FloatExpr) -> typing.Never: self._unsupported_node(expr, "floating point math is not supported") @typing.override def visit_complex_expr(self, expr: mypy.nodes.ComplexExpr) -> typing.Never: self._unsupported_node(expr, "complex math is not supported") # ~~~ generator functions unsupported ~~~ # @typing.override def visit_generator_expr(self, expr: mypy.nodes.GeneratorExpr) -> typing.Never: self._unsupported_node(expr, "generator functions are not supported") @typing.override def visit_yield_expr(self, expr: mypy.nodes.YieldExpr) -> typing.Never: self._unsupported_node(expr, "generator functions are not supported") @typing.override def visit_yield_from_expr(self, o: mypy.nodes.YieldFromExpr) -> typing.Never: self._unsupported_node(o, "generator functions are not supported") # ~~~ async/await functions unsupported ~~~ # @typing.override def visit_await_expr(self, o: mypy.nodes.AwaitExpr) -> typing.Never: self._unsupported_node(o, "async/await is not supported") # ~~~ analysis-only expressions, should never show up ~~~ # @typing.override def visit_temp_node(self, expr: mypy.nodes.TempNode) -> _TExpression: # "dummy node" raise InternalError( "Placeholder expression node encountered, should be handled at a higher level", self._location(expr), ) def __analysis_only(self, expr: mypy.nodes.Expression) -> typing.Never: raise InternalError( f"Can't compile analysis-only expression of type {type(expr).__name__}", self._location(expr), ) @typing.override def visit_cast_expr(self, expr: mypy.nodes.CastExpr) -> _TExpression: # NOTE: only appears as CallExpr.analyzed return self.__analysis_only(expr) @typing.override def visit_assert_type_expr(self, expr: mypy.nodes.AssertTypeExpr) -> _TExpression: # NOTE: 
only appears as CallExpr.analyzed return self.__analysis_only(expr) @typing.override def visit_enum_call_expr(self, expr: mypy.nodes.EnumCallExpr) -> _TExpression: # NOTE: only appears as CallExpr.analyzed return self.__analysis_only(expr) @typing.override def visit__promote_expr(self, expr: mypy.nodes.PromoteExpr) -> _TExpression: # NOTE: only appears as CallExpr.analyzed return self.__analysis_only(expr) @typing.override def visit_namedtuple_expr(self, expr: mypy.nodes.NamedTupleExpr) -> _TExpression: # NOTE: only appears as (ClassDef|CallExpr).analyzed return self.__analysis_only(expr) @typing.override def visit_newtype_expr(self, expr: mypy.nodes.NewTypeExpr) -> _TExpression: # NOTE: only appears as CallExpr.analyzed return self.__analysis_only(expr) @typing.override def visit_type_alias_expr(self, expr: mypy.nodes.TypeAliasExpr) -> _TExpression: # NOTE: only appears as (IndexExpr|CallExpr|OpExpr).analyzed return self.__analysis_only(expr) @typing.override def visit_type_application(self, expr: mypy.nodes.TypeApplication) -> _TExpression: # NOTE: only appears as IndexExpr.analyzed return self.__analysis_only(expr) @typing.override def visit_type_var_expr(self, expr: mypy.nodes.TypeVarExpr) -> _TExpression: # NOTE: appears as CallExpr.analyzed OR as SymbolTableNode.node return self.__analysis_only(expr) @typing.override def visit_paramspec_expr(self, expr: mypy.nodes.ParamSpecExpr) -> _TExpression: # NOTE: only appears as CallExpr.analyzed return self.__analysis_only(expr) @typing.override def visit_type_var_tuple_expr(self, expr: mypy.nodes.TypeVarTupleExpr) -> _TExpression: # NOTE: only appears as CallExpr.analyzed return self.__analysis_only(expr) @typing.override def visit_typeddict_expr(self, expr: mypy.nodes.TypedDictExpr) -> _TExpression: # NOTE: only appears as (ClassDef|CallExpr).analyzed return self.__analysis_only(expr) @typing.override def visit_reveal_expr(self, expr: mypy.nodes.RevealExpr) -> _TExpression: # NOTE: only appears as CallExpr.analyzed return self.__analysis_only(expr)
algorandfoundation/puya
src/puyapy/awst_build/base_mypy_visitor.py
Python
NOASSERTION
12,472
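The base visitors above follow a common shape: unsupported node kinds all funnel into one error helper, while supported kinds are forwarded to hooks that subclasses implement. An illustrative sketch of that shape, using the stdlib ast module in place of mypy's visitor classes (all names here are hypothetical, not from this repo):

import ast


class StrictStatementVisitor(ast.NodeVisitor):
    # one helper for every rejected construct, mirroring _unsupported_node
    def _unsupported(self, node: ast.AST, details: str) -> None:
        raise NotImplementedError(f"line {getattr(node, 'lineno', '?')}: {details}")

    def visit_Try(self, node: ast.Try) -> None:
        self._unsupported(node, "exception handling is not supported")

    def visit_With(self, node: ast.With) -> None:
        self._unsupported(node, "context managers are not supported")

    def visit_Raise(self, node: ast.Raise) -> None:
        self._unsupported(node, "exception raising is not supported")


class Collector(StrictStatementVisitor):
    def __init__(self) -> None:
        self.functions: list[str] = []

    def visit_FunctionDef(self, node: ast.FunctionDef) -> None:
        self.functions.append(node.name)


tree = ast.parse("def f():\n    pass\n")
collector = Collector()
collector.visit(tree)
assert collector.functions == ["f"]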
ARC4_CONTRACT_BASE = "algopy.arc4.ARC4Contract"
CONTRACT_BASE = "algopy._contract.Contract"
STRUCT_META = "algopy._struct._StructMeta"
SUBROUTINE_HINT = "algopy._hints.subroutine"
LOGICSIG_DECORATOR = "algopy._logic_sig.logicsig"
LOGICSIG_DECORATOR_ALIAS = "algopy.logicsig"
SUBROUTINE_HINT_ALIAS = "algopy.subroutine"
ABIMETHOD_DECORATOR = "algopy.arc4.abimethod"
ABIMETHOD_DECORATOR_ALIAS = ABIMETHOD_DECORATOR
BAREMETHOD_DECORATOR = "algopy.arc4.baremethod"
BAREMETHOD_DECORATOR_ALIAS = BAREMETHOD_DECORATOR
APPROVAL_METHOD = "approval_program"
CLEAR_STATE_METHOD = "clear_state_program"
ALGOPY_OP_PREFIX = "algopy.op."
URANGE = "algopy._unsigned_builtins.urange"
CLS_ARC4_STRUCT_META = "algopy.arc4._StructMeta"
CLS_ARC4_ABI_CALL = "algopy.arc4.abi_call"
algorandfoundation/puya
src/puyapy/awst_build/constants.py
Python
NOASSERTION
759
import ast import contextlib import functools from collections.abc import Iterator, Mapping, Sequence from pathlib import Path import attrs import mypy.nodes import mypy.options import mypy.types from puya import log from puya.context import try_get_source from puya.errors import CodeError, InternalError, log_exceptions from puya.parse import SourceLocation from puya.program_refs import ContractReference from puya.utils import attrs_extend, unique from puyapy.awst_build import pytypes from puyapy.models import ConstantValue, ContractFragmentBase from puyapy.parse import ParseResult, source_location_from_mypy logger = log.get_logger(__name__) @attrs.frozen(kw_only=True) class ASTConversionContext: _parse_result: ParseResult constants: dict[str, ConstantValue] = attrs.field(factory=dict) _pytypes: dict[str, pytypes.PyType] = attrs.field(factory=pytypes.builtins_registry) _contract_fragments: dict[ContractReference, ContractFragmentBase] = attrs.field(factory=dict) @property def mypy_options(self) -> mypy.options.Options: return self._parse_result.mypy_options @property def contract_fragments(self) -> Mapping[ContractReference, ContractFragmentBase]: return self._contract_fragments def add_contract_fragment(self, fragment: ContractFragmentBase) -> None: assert ( fragment.id not in self._contract_fragments ), "attempted to add contract fragment twice" self._contract_fragments[fragment.id] = fragment def for_module(self, module_path: Path) -> "ASTConversionModuleContext": return attrs_extend(ASTConversionModuleContext, self, module_path=module_path) def register_pytype(self, typ: pytypes.PyType, *, alias: str | None = None) -> None: name = alias or typ.name existing_entry = self._pytypes.get(name) if existing_entry is typ: logger.debug(f"Duplicate registration of {typ}") else: if existing_entry is not None: logger.error(f"Redefinition of type {name}") self._pytypes[name] = typ def lookup_pytype(self, name: str) -> pytypes.PyType | None: """Lookup type by the canonical fully qualified name""" return self._pytypes.get(name) def require_ptype(self, name: str, source_location: SourceLocation) -> pytypes.PyType: try: return self._pytypes[name] except KeyError: raise CodeError(f"Unknown type {name}", source_location) from None @attrs.frozen(kw_only=True) class ASTConversionModuleContext(ASTConversionContext): module_path: Path def node_location( self, node: mypy.nodes.Context, module_src: mypy.nodes.TypeInfo | None = None, ) -> SourceLocation: if not module_src: module_path = self.module_path else: module_name = module_src.module_name try: module_path = self._parse_result.ordered_modules[module_name].path except KeyError as ex: raise CodeError(f"could not find module '{module_name}'") from ex loc = source_location_from_mypy(file=module_path, node=node) # if not at start of file, try and expand to preceding comment lines, if loc.line > 1: prior_code = try_get_source( self._parse_result.sources_by_path, SourceLocation(file=module_path, line=1, end_line=loc.line - 1), ) comment_lines_count = 0 for line in reversed(prior_code or []): if not line.strip().startswith("#"): break comment_lines_count += 1 if comment_lines_count: loc = attrs.evolve(loc, comment_lines=comment_lines_count) # if multi-line, strip trailing blank/comment lines if loc.end_line != loc.line: lines = try_get_source(self._parse_result.sources_by_path, loc) if lines is not None: chop = 0 for line in reversed(lines): l_stripped = line.lstrip() if l_stripped and not l_stripped.startswith("#"): break chop += 1 if chop: loc = 
attrs.evolve(loc, end_line=loc.end_line - chop, end_column=None) return loc def _maybe_convert_location( self, location: mypy.nodes.Context | SourceLocation ) -> SourceLocation: if isinstance(location, mypy.nodes.Context): return self.node_location(location) return location def error(self, msg: str, location: mypy.nodes.Context | SourceLocation) -> None: logger.error(msg, location=self._maybe_convert_location(location)) def info(self, msg: str, location: mypy.nodes.Context | SourceLocation) -> None: logger.info(msg, location=self._maybe_convert_location(location)) def warning(self, msg: str, location: mypy.nodes.Context | SourceLocation) -> None: logger.warning(msg, location=self._maybe_convert_location(location)) @contextlib.contextmanager def log_exceptions( self, fallback_location: mypy.nodes.Context | SourceLocation ) -> Iterator[None]: with log_exceptions(self._maybe_convert_location(fallback_location)): yield def type_to_pytype( self, mypy_type: mypy.types.Type, *, source_location: mypy.nodes.Context | SourceLocation, in_type_args: bool = False, ) -> pytypes.PyType: return type_to_pytype( self._pytypes, mypy_type, source_location=self._maybe_convert_location(source_location), in_type_args=in_type_args, ) def type_to_pytype( registry: Mapping[str, pytypes.PyType], mypy_type: mypy.types.Type, *, source_location: SourceLocation, in_type_args: bool = False, in_func_sig: bool = False, ) -> pytypes.PyType: loc = source_location proper_type_or_alias: mypy.types.ProperType | mypy.types.TypeAliasType if isinstance(mypy_type, mypy.types.TypeAliasType): proper_type_or_alias = mypy_type else: proper_type_or_alias = mypy.types.get_proper_type(mypy_type) recurse = functools.partial( type_to_pytype, registry, source_location=loc, in_type_args=in_type_args, in_func_sig=in_func_sig, ) match proper_type_or_alias: case mypy.types.TypeAliasType(alias=alias, args=args): if alias is None: raise InternalError("mypy type alias type missing alias reference", loc) result = registry.get(alias.fullname) if result is None: return recurse(mypy.types.get_proper_type(proper_type_or_alias)) return _maybe_parameterise_pytype(registry, result, args, loc) # this is how variadic tuples are represented in mypy types... case mypy.types.Instance(type=mypy.nodes.TypeInfo(fullname="builtins.tuple"), args=args): try: (arg,) = args except ValueError: raise InternalError( f"mypy tuple type as instance had unrecognised args: {args}", loc ) from None if not in_func_sig: raise CodeError("variadic tuples are not supported", loc) return pytypes.VariadicTupleType(items=recurse(arg)) case mypy.types.Instance(args=args) as inst: fullname = inst.type.fullname result = registry.get(fullname) if result is None: if fullname.startswith("builtins."): msg = f"Unsupported builtin type: {fullname.removeprefix('builtins.')}" else: msg = f"Unknown type: {fullname}" raise CodeError(msg, loc) return _maybe_parameterise_pytype(registry, result, args, loc) case mypy.types.TupleType(items=items, partial_fallback=fallback): if not fallback.args: return recurse(fallback) generic = registry.get(fallback.type.fullname) if generic is None: raise CodeError(f"unknown tuple base type: {fallback.type.fullname}", loc) return _maybe_parameterise_pytype(registry, generic, items, loc) case mypy.types.LiteralType(fallback=fallback, value=literal_value) as mypy_literal_type: if not in_type_args: # this is a bit clumsy, but exists because for some reason, bool types # can be "narrowed" down to a typing.Literal. e.g. 
in the case of: # assert a # assert a or b # then the type of `a or b` becomes typing.Literal[True] return recurse(fallback) if mypy_literal_type.is_enum_literal(): raise CodeError("typing literals of enum are not supported", loc) our_literal_value: pytypes.TypingLiteralValue if fallback.type.fullname == "builtins.bytes": # WHY^2 bytes_literal_value = ast.literal_eval("b" + repr(literal_value)) assert isinstance(bytes_literal_value, bytes) our_literal_value = bytes_literal_value elif isinstance(literal_value, float): # WHY raise CodeError("typing literals with float values are not supported", loc) else: our_literal_value = literal_value return pytypes.TypingLiteralType(value=our_literal_value, source_location=loc) case mypy.types.UnionType(items=items): types = unique(recurse(it) for it in items) if not types: return pytypes.NeverType elif len(types) == 1: return types[0] else: return pytypes.UnionType(types, loc) case mypy.types.NoneType() | mypy.types.PartialType(type=None): return pytypes.NoneType case mypy.types.UninhabitedType(): return pytypes.NeverType case mypy.types.AnyType(type_of_any=type_of_any): msg = _type_of_any_to_error_message(type_of_any, loc) raise CodeError(msg, loc) case mypy.types.TypeType(item=inner_type): inner_pytype = recurse(inner_type) return pytypes.TypeType(inner_pytype) case mypy.types.FunctionLike() as func_like: if func_like.is_type_obj(): # note sure if this will always work for overloads, but the only overloaded # constructor we have is arc4.StaticArray, so... ret_type = func_like.items[0].ret_type cls_typ = recurse(ret_type) return pytypes.TypeType(cls_typ) else: if not isinstance(func_like, mypy.types.CallableType): # vs Overloaded raise CodeError("references to overloaded functions are not supported", loc) ret_pytype = recurse(func_like.ret_type) func_args = [] for at, name, kind in zip( func_like.arg_types, func_like.arg_names, func_like.arg_kinds, strict=True ): arg_pytype = type_to_pytype( registry, at, source_location=loc, in_type_args=in_type_args, in_func_sig=True, ) func_args.append(pytypes.FuncArg(type=arg_pytype, kind=kind, name=name)) if func_like.bound_args: logger.error("function type has bound arguments", location=loc) if func_like.definition is not None: name = func_like.definition.fullname else: name = repr(func_like) if func_like.def_extras.get("first_arg"): _self_arg, *func_args = func_args return pytypes.FuncType( name=name, args=func_args, ret_type=ret_pytype, ) case _: raise CodeError(f"Unable to resolve mypy type {mypy_type!r} to known algopy type", loc) def _maybe_parameterise_pytype( registry: Mapping[str, pytypes.PyType], maybe_generic: pytypes.PyType, mypy_type_args: Sequence[mypy.types.Type], loc: SourceLocation, ) -> pytypes.PyType: if not mypy_type_args: return maybe_generic if all(isinstance(t, mypy.types.TypeVarType | mypy.types.UnpackType) for t in mypy_type_args): return maybe_generic type_args_resolved = [ type_to_pytype(registry, mta, source_location=loc, in_type_args=True) for mta in mypy_type_args ] result = maybe_generic.parameterise(type_args_resolved, loc) return result def _type_of_any_to_error_message(type_of_any: int, source_location: SourceLocation) -> str: from mypy.types import TypeOfAny match type_of_any: case TypeOfAny.unannotated: msg = "type annotation is required at this location" case TypeOfAny.explicit | TypeOfAny.from_another_any: msg = "Any type is not supported" case TypeOfAny.from_unimported_type: msg = "unknown type from import" case TypeOfAny.from_omitted_generics: msg = "type parameters are 
required at this location" case TypeOfAny.from_error: msg = "typing error prevents type resolution" case TypeOfAny.special_form: msg = "unsupported type form" case TypeOfAny.implementation_artifact | TypeOfAny.suggestion_engine: msg = "mypy cannot handle this type form, try providing an explicit annotation" case _: logger.debug(f"Unknown TypeOfAny value: {type_of_any}", location=source_location) msg = "Any type is not supported" return msg
algorandfoundation/puya
src/puyapy/awst_build/context.py
Python
NOASSERTION
14,065
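node_location above widens a source span to include the comment lines directly before a node by walking backwards until a non-comment line is hit. A standalone sketch of just that counting step (the function name is hypothetical):

def count_preceding_comment_lines(source_lines: list[str], node_line: int) -> int:
    """node_line is 1-based; counts contiguous '#'-only lines directly above it."""
    count = 0
    for line in reversed(source_lines[: node_line - 1]):
        if not line.strip().startswith("#"):
            break  # stop at the first non-comment line
        count += 1
    return count


src = [
    "x = 1",
    "# explains y",
    "# in two lines",
    "y = 2",
]
assert count_preceding_comment_lines(src, node_line=4) == 2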
import abc import contextlib import typing from collections.abc import Callable, Iterator, Sequence, Set import attrs import mypy.nodes import mypy.types import mypy.visitor from puya import log from puya.avm import OnCompletionAction from puya.awst import ( nodes as awst_nodes, wtypes, ) from puya.errors import CodeError, InternalError from puya.parse import SourceLocation from puya.program_refs import ContractReference from puya.utils import StableSet, set_add, unique from puyapy.awst_build import constants, intrinsic_factory, pytypes from puyapy.awst_build.arc4_decorators import get_arc4_abimethod_data, get_arc4_baremethod_data from puyapy.awst_build.base_mypy_visitor import BaseMyPyStatementVisitor from puyapy.awst_build.context import ASTConversionModuleContext from puyapy.awst_build.subroutine import ContractMethodInfo, FunctionASTConverter from puyapy.awst_build.utils import get_decorators_by_fullname, get_subroutine_decorator_inline_arg from puyapy.models import ( ARC4BareMethodData, ARC4MethodData, ContractClassOptions, ContractFragmentBase, ContractFragmentMethod, ContractFragmentStorage, ) logger = log.get_logger(__name__) _ContractMethodBuilder: typing.TypeAlias = Callable[ [ASTConversionModuleContext], awst_nodes.ContractMethod ] _INIT_METHOD = "__init__" _ARC4_CONTRACT_BASE_CREF = ContractReference(constants.ARC4_CONTRACT_BASE) _SYNTHETIC_LOCATION = SourceLocation(file=None, line=1) class ContractASTConverter(BaseMyPyStatementVisitor[None]): def __init__( self, context: ASTConversionModuleContext, class_def: mypy.nodes.ClassDef, class_options: ContractClassOptions, typ: pytypes.ContractType, ): super().__init__(context=context) class_loc = self._location(class_def) fragment_mro = _build_resolved_mro(context, typ) if class_options.state_totals is None: base_with_defined = next( (b for b in fragment_mro if b.options and (b.options.state_totals is not None)), None, ) if base_with_defined: logger.warning( f"Contract extends base contract {base_with_defined.id} " "with explicit state_totals, but does not define its own state_totals. " "This could result in insufficient reserved state at run time.", location=class_loc, ) self.fragment: typing.Final = _ContractFragment( id=typ.name, source_location=class_loc, pytype=typ, mro=fragment_mro, is_abstract=_check_class_abstractness(context, class_def), options=class_options, docstring=class_def.docstring, ) # TODO: validation for state proxies being non-conditional _build_symbols_and_state(context, self.fragment, class_def.info.names) self._deferred_methods = list[tuple[ContractFragmentMethod, _ContractMethodBuilder]]() # if the class has an __init__ method, we need to visit it first, so any storage # fields cane be resolved to a (static) key match class_def.info.names.get(_INIT_METHOD): case mypy.nodes.SymbolTableNode(node=mypy.nodes.Statement() as init_node): stmts = unique((init_node, *class_def.defs.body)) case _: stmts = class_def.defs.body # note: we iterate directly and catch+log code errors here, # since each statement should be somewhat independent given # the constraints we place (e.g. 
if one function fails to convert, # we can still keep trying to convert other functions to produce more valid errors) for stmt in stmts: with context.log_exceptions(fallback_location=stmt): stmt.accept(self) if ( self.fragment.is_arc4 and not self.fragment.is_abstract and not any(self.fragment.find_arc4_method_metadata(can_create=True)) ): self._insert_default_arc4_create(self.fragment) context.add_contract_fragment(self.fragment) @staticmethod def _insert_default_arc4_create(fragment: "_ContractFragment") -> None: if any(fragment.find_arc4_method_metadata(bare=True, oca=OnCompletionAction.NoOp)): logger.error( "Non-abstract ARC4 contract has no methods that can be called" " to create the contract, but does have a NoOp bare method," " so one couldn't be inserted." " In order to allow creating the contract add either" " an @abimethod or @baremethod" ' decorated method with create="require" or create="allow"', location=fragment.source_location, ) else: default_create_name = "__algopy_default_create" while fragment.resolve_symbol(default_create_name): # ensure uniqueness default_create_name = f"_{default_create_name}" default_create_config = awst_nodes.ARC4BareMethodConfig( create=awst_nodes.ARC4CreateOption.require, source_location=_SYNTHETIC_LOCATION, ) fragment.add_method( ContractFragmentMethod( member_name=default_create_name, source_location=_SYNTHETIC_LOCATION, metadata=ARC4BareMethodData( member_name=default_create_name, pytype=( pytypes.FuncType( name=".".join((fragment.id, default_create_name)), args=(), ret_type=pytypes.NoneType, ) ), config=default_create_config, source_location=_SYNTHETIC_LOCATION, ), is_trivial=False, synthetic=True, inheritable=False, implementation=awst_nodes.ContractMethod( cref=fragment.id, member_name=default_create_name, args=[], return_type=wtypes.void_wtype, body=awst_nodes.Block( body=[], source_location=_SYNTHETIC_LOCATION, ), documentation=awst_nodes.MethodDocumentation(), arc4_method_config=default_create_config, source_location=_SYNTHETIC_LOCATION, inline=True, ), ) ) def build(self, context: ASTConversionModuleContext) -> awst_nodes.Contract | None: for method_fragment, method_builder in self._deferred_methods: with context.log_exceptions(fallback_location=method_fragment.source_location): method_fragment.implementation = method_builder(context) if self.fragment.is_abstract: return None approval_program = None approval_method = self.fragment.resolve_method(constants.APPROVAL_METHOD) if approval_method is None or approval_method.is_trivial: logger.error( "non-abstract contract class missing approval program", location=self.fragment.source_location, ) elif approval_method.implementation is None: pass # error during method construction, already logged else: approval_program = approval_method.implementation if self.fragment.resolve_method(_INIT_METHOD) is not None: approval_program = _insert_init_call_on_create( self.fragment.id, approval_program.return_type ) clear_method = self.fragment.resolve_method(constants.CLEAR_STATE_METHOD) if clear_method is None or clear_method.is_trivial: logger.error( "non-abstract contract class missing clear-state program", location=self.fragment.source_location, ) clear_program = None else: clear_program = clear_method.implementation if approval_program is None or clear_program is None: return None return awst_nodes.Contract( id=self.fragment.id, name=self.fragment.options.name_override or self.fragment.pytype.class_name, method_resolution_order=[ancestor.id for ancestor in self.fragment.mro], 
approval_program=approval_program, clear_program=clear_program, methods=tuple( cm.implementation for cm in self.fragment.methods(include_overridden=True) if cm.implementation is not None ), app_state=tuple( state_decl.definition for state_decl in self.fragment.state() if state_decl.definition is not None ), description=self.fragment.docstring, source_location=self.fragment.source_location, reserved_scratch_space=self.fragment.reserved_scratch_space, state_totals=self.fragment.options.state_totals, avm_version=self.fragment.options.avm_version, ) def empty_statement(self, _stmt: mypy.nodes.Statement) -> None: return None def visit_function( self, func_def: mypy.nodes.FuncDef, decorator: mypy.nodes.Decorator | None ) -> None: func_loc = self._location(func_def) method_name = func_def.name if func_def.is_class: raise CodeError("@classmethod not supported", func_loc) if func_def.is_static: raise CodeError( "@staticmethod not supported, use a module level function instead", func_loc ) if func_def.type is None: raise CodeError("function is untyped", func_loc) if len(func_def.arguments) < 1: # since we checked we're only handling instance methods, should be at least one # argument to function - ie self logger.error(f"{method_name} should take a self parameter", location=func_loc) dec_by_fullname = get_decorators_by_fullname(self.context, decorator) if decorator else {} subroutine_dec = dec_by_fullname.pop(constants.SUBROUTINE_HINT, None) abimethod_dec = dec_by_fullname.pop(constants.ABIMETHOD_DECORATOR, None) baremethod_dec = dec_by_fullname.pop(constants.BAREMETHOD_DECORATOR, None) for unknown_dec_fullname, dec in dec_by_fullname.items(): self._error(f'unsupported decorator "{unknown_dec_fullname}"', dec) # TODO: handle difference of subroutine vs abimethod and overrides??? inline = None if subroutine_dec is not None: inline = get_subroutine_decorator_inline_arg(self.context, subroutine_dec) arc4_method_data: ARC4MethodData | None = None if method_name in (_INIT_METHOD, constants.APPROVAL_METHOD, constants.CLEAR_STATE_METHOD): for invalid_dec in (subroutine_dec, abimethod_dec, baremethod_dec): if invalid_dec is not None: self._error("method should not be decorated", location=invalid_dec) elif method_name.startswith("__") and method_name.endswith("__"): raise CodeError( "methods starting and ending with a double underscore" ' (aka "dunder" methods) are reserved for the Python data model' " (https://docs.python.org/3/reference/datamodel.html)." 
" Of these methods, only __init__ is supported in contract classes", func_loc, ) elif not self.fragment.is_arc4: if subroutine_dec is None: logger.error( f"missing @{constants.SUBROUTINE_HINT_ALIAS} decorator", location=func_loc ) for invalid_dec in (abimethod_dec, baremethod_dec): if invalid_dec is not None: self._error( f"decorator is only valid in subclasses of {pytypes.ARC4ContractBaseType}", invalid_dec, ) else: if len(list(filter(None, (subroutine_dec, abimethod_dec, baremethod_dec)))) != 1: logger.error( f"ARC-4 contract member functions" f" (other than __init__ or approval / clear program methods)" f" must be annotated with exactly one of" f" @{constants.SUBROUTINE_HINT_ALIAS}," f" @{constants.ABIMETHOD_DECORATOR_ALIAS}," f" or @{constants.BAREMETHOD_DECORATOR_ALIAS}", location=func_loc, ) if abimethod_dec: arc4_method_data = get_arc4_abimethod_data(self.context, abimethod_dec, func_def) elif baremethod_dec: arc4_method_data = get_arc4_baremethod_data(self.context, baremethod_dec, func_def) else: arc4_method_data = None # TODO: validate against super-class configs?? source_location = self._location(decorator or func_def) obj = ContractFragmentMethod( member_name=method_name, source_location=source_location, metadata=arc4_method_data, is_trivial=func_def.is_trivial_body, synthetic=False, inheritable=True, implementation=None, ) self.fragment.add_method(obj) if obj.is_trivial: logger.debug(f"skipping trivial method {method_name}", location=func_loc) else: self._deferred_methods.append( ( obj, lambda ctx: FunctionASTConverter.convert( ctx, func_def=func_def, source_location=source_location, inline=inline, contract_method_info=ContractMethodInfo( fragment=self.fragment, contract_type=self.fragment.pytype, arc4_method_config=( arc4_method_data.config if arc4_method_data else None ), is_abstract=self.fragment.is_abstract, ), ), ) ) def visit_block(self, o: mypy.nodes.Block) -> None: raise InternalError("shouldn't get here", self._location(o)) def visit_return_stmt(self, stmt: mypy.nodes.ReturnStmt) -> None: self._error("illegal Python syntax, return in class body", location=stmt) def visit_class_def(self, cdef: mypy.nodes.ClassDef) -> None: self._error("nested classes are not supported", location=cdef) def _unsupported_stmt(self, kind: str, stmt: mypy.nodes.Statement) -> None: self._error(f"{kind} statements are not supported in the class body", location=stmt) def visit_assignment_stmt(self, stmt: mypy.nodes.AssignmentStmt) -> None: # just pass on state forward-declarations, these will be picked up by gather state # everything else (ie any _actual_ assignments) is unsupported if not isinstance(stmt.rvalue, mypy.nodes.TempNode): self._unsupported_stmt("assignment", stmt) def visit_operator_assignment_stmt(self, stmt: mypy.nodes.OperatorAssignmentStmt) -> None: self._unsupported_stmt("operator assignment", stmt) def visit_expression_stmt(self, stmt: mypy.nodes.ExpressionStmt) -> None: if isinstance(stmt.expr, mypy.nodes.StrExpr): # ignore class docstring, already extracted # TODO: should we capture field "docstrings"? 
pass else: self._unsupported_stmt("expression statement", stmt) def visit_if_stmt(self, stmt: mypy.nodes.IfStmt) -> None: self._unsupported_stmt("if", stmt) def visit_while_stmt(self, stmt: mypy.nodes.WhileStmt) -> None: self._unsupported_stmt("while", stmt) def visit_for_stmt(self, stmt: mypy.nodes.ForStmt) -> None: self._unsupported_stmt("for", stmt) def visit_break_stmt(self, stmt: mypy.nodes.BreakStmt) -> None: self._unsupported_stmt("break", stmt) def visit_continue_stmt(self, stmt: mypy.nodes.ContinueStmt) -> None: self._unsupported_stmt("continue", stmt) def visit_assert_stmt(self, stmt: mypy.nodes.AssertStmt) -> None: self._unsupported_stmt("assert", stmt) def visit_del_stmt(self, stmt: mypy.nodes.DelStmt) -> None: self._unsupported_stmt("del", stmt) def visit_match_stmt(self, stmt: mypy.nodes.MatchStmt) -> None: self._unsupported_stmt("match", stmt) def visit_type_alias_stmt(self, stmt: mypy.nodes.TypeAliasStmt) -> None: self._unsupported_stmt("type", stmt) class _UserContractBase(ContractFragmentBase, abc.ABC): @property @abc.abstractmethod def options(self) -> ContractClassOptions | None: ... @attrs.frozen class _StaticContractBase(_UserContractBase): id: ContractReference methods_: dict[str, ContractFragmentMethod] mro: Sequence[ContractFragmentBase] symbols: dict[str, pytypes.PyType] options: None = None @typing.override def resolve_method( self, name: str, *, include_inherited: bool = True ) -> ContractFragmentMethod | None: return self.methods_.get(name) @typing.override def methods( self, *, include_inherited: bool = True, include_overridden: bool = False ) -> Iterator[ContractFragmentMethod]: yield from self.methods_.values() @typing.override def resolve_storage( self, name: str, *, include_inherited: bool = True ) -> ContractFragmentStorage | None: return None @typing.override def state(self, *, include_inherited: bool = True) -> Iterator[ContractFragmentStorage]: yield from () @attrs.frozen(kw_only=True) class _ContractFragment(_UserContractBase): id: ContractReference source_location: SourceLocation pytype: pytypes.ContractType mro: Sequence[_UserContractBase] is_abstract: bool options: ContractClassOptions docstring: str | None _methods: dict[str, ContractFragmentMethod] = attrs.field(factory=dict, init=False) _state_defs: dict[str, ContractFragmentStorage] = attrs.field(factory=dict, init=False) symbols: dict[str, pytypes.PyType | None] = attrs.field(factory=dict, init=False) @property def is_arc4(self) -> bool: return pytypes.ARC4ContractBaseType in self.pytype.mro def add_method(self, method: ContractFragmentMethod) -> None: set_result = self._methods.setdefault(method.member_name, method) if set_result is not method: logger.info( f"previous definition of {method.member_name} was here", location=set_result.source_location, ) logger.error( f"redefinition of {method.member_name}", location=method.source_location, ) @typing.override def resolve_method( self, name: str, *, include_inherited: bool = True ) -> ContractFragmentMethod | None: with contextlib.suppress(KeyError): return self._methods[name] if include_inherited: for fragment in self.mro: method = fragment.resolve_method(name, include_inherited=False) if method and method.inheritable: return method return None @typing.override def methods( self, *, include_inherited: bool = True, include_overridden: bool = False ) -> Iterator[ContractFragmentMethod]: yield from self._methods.values() if include_inherited: seen_names = set(self._methods.keys()) for fragment in self.mro: for method in 
fragment.methods(include_inherited=False): if method.inheritable and ( include_overridden or set_add(seen_names, method.member_name) ): yield method @typing.override def resolve_storage( self, name: str, *, include_inherited: bool = True ) -> ContractFragmentStorage | None: with contextlib.suppress(KeyError): return self._state_defs[name] if include_inherited: for fragment in self.mro: result = fragment.resolve_storage(name, include_inherited=False) if result is not None: return result return None def add_state(self, decl: ContractFragmentStorage) -> None: existing = self.resolve_storage(decl.member_name) self._state_defs.setdefault(decl.member_name, decl) if existing is not None: logger.info( f"previous definition of {decl.member_name} was here", location=existing.source_location, ) logger.error( f"redefinition of {decl.member_name}", location=decl.source_location, ) @typing.override def state(self, *, include_inherited: bool = True) -> Iterator[ContractFragmentStorage]: result = self._state_defs if include_inherited: for ancestor in self.mro: result = { s.member_name: s for s in ancestor.state(include_inherited=False) } | result yield from result.values() @property def reserved_scratch_space(self) -> Set[int]: return StableSet[int].from_iter( num for c in (self, *self.mro) if c.options and c.options.scratch_slot_reservations for num in c.options.scratch_slot_reservations ) def _insert_init_call_on_create( current_contract: ContractReference, return_type: wtypes.WType ) -> awst_nodes.ContractMethod: call_init = awst_nodes.Block( comment="call __init__", body=[ awst_nodes.ExpressionStatement( expr=awst_nodes.SubroutineCallExpression( target=awst_nodes.InstanceMethodTarget(member_name=_INIT_METHOD), args=[], wtype=wtypes.void_wtype, source_location=_SYNTHETIC_LOCATION, ) ) ], source_location=_SYNTHETIC_LOCATION, ) call_init_on_create = awst_nodes.IfElse( condition=awst_nodes.Not( expr=intrinsic_factory.txn("ApplicationID", wtypes.bool_wtype, _SYNTHETIC_LOCATION), source_location=_SYNTHETIC_LOCATION, ), if_branch=call_init, else_branch=None, source_location=_SYNTHETIC_LOCATION, ) return awst_nodes.ContractMethod( cref=current_contract, member_name="__algopy_entrypoint_with_init", args=[], arc4_method_config=None, return_type=return_type, documentation=awst_nodes.MethodDocumentation(), body=awst_nodes.Block( body=[ call_init_on_create, awst_nodes.ReturnStatement( value=awst_nodes.SubroutineCallExpression( target=awst_nodes.InstanceMethodTarget( member_name=constants.APPROVAL_METHOD, ), args=[], wtype=return_type, source_location=_SYNTHETIC_LOCATION, ), source_location=_SYNTHETIC_LOCATION, ), ], source_location=_SYNTHETIC_LOCATION, ), source_location=_SYNTHETIC_LOCATION, ) def _build_resolved_mro( context: ASTConversionModuleContext, contract_type: pytypes.ContractType ) -> list[_UserContractBase]: class_def_loc = contract_type.source_location contract_bases_mro = list[_UserContractBase]() for ancestor in contract_type.mro: if ancestor == pytypes.ContractBaseType: pass elif ancestor == pytypes.ARC4ContractBaseType: contract_bases_mro.append(_arc4_contract_fragment()) elif isinstance(ancestor, pytypes.ContractType): ancestor_fragment = context.contract_fragments.get(ancestor.name) if isinstance(ancestor_fragment, _ContractFragment): contract_bases_mro.append(ancestor_fragment) else: raise CodeError( f"contract type has non-contract base {ancestor.name}", class_def_loc ) else: raise CodeError(f"base class {ancestor} is not a contract subclass", class_def_loc) return contract_bases_mro def 
_arc4_contract_fragment() -> _UserContractBase: result = _StaticContractBase(id=_ARC4_CONTRACT_BASE_CREF, mro=(), methods_={}, symbols={}) def add_program_method( name: str, body: Sequence[awst_nodes.Statement], *, return_type: pytypes.RuntimeType = pytypes.BoolType, ) -> None: result.symbols[name] = pytypes.FuncType( name=".".join((_ARC4_CONTRACT_BASE_CREF, name)), args=(), ret_type=return_type, ) implementation = awst_nodes.ContractMethod( cref=_ARC4_CONTRACT_BASE_CREF, member_name=name, source_location=_SYNTHETIC_LOCATION, args=[], arc4_method_config=None, return_type=return_type.wtype, documentation=awst_nodes.MethodDocumentation(), body=awst_nodes.Block(body=body, source_location=_SYNTHETIC_LOCATION), inline=None, ) result.methods_[name] = ContractFragmentMethod( member_name=name, source_location=_SYNTHETIC_LOCATION, metadata=None, is_trivial=False, synthetic=True, inheritable=True, implementation=implementation, ) add_program_method( name=constants.APPROVAL_METHOD, body=[ awst_nodes.ReturnStatement( value=awst_nodes.ARC4Router(source_location=_SYNTHETIC_LOCATION), source_location=_SYNTHETIC_LOCATION, ) ], ) add_program_method( name=constants.CLEAR_STATE_METHOD, body=[ awst_nodes.ReturnStatement( value=awst_nodes.BoolConstant(value=True, source_location=_SYNTHETIC_LOCATION), source_location=_SYNTHETIC_LOCATION, ) ], ) return result def _build_symbols_and_state( context: ASTConversionModuleContext, fragment: _ContractFragment, symtable: mypy.nodes.SymbolTable, ) -> None: cref = fragment.id for name, sym in symtable.items(): node = sym.node assert node, f"mypy cross reference remains unresolved: member {name!r} of {cref!r}" node_loc = context.node_location(node) if isinstance(node, mypy.nodes.OverloadedFuncDef): node = node.impl if isinstance(node, mypy.nodes.Decorator): # we don't support any decorators that would change signature node = node.func pytyp = None if isinstance(node, mypy.nodes.Var | mypy.nodes.FuncDef) and node.type: with contextlib.suppress(CodeError): pytyp = context.type_to_pytype(node.type, source_location=node_loc) fragment.symbols[name] = pytyp if pytyp and not isinstance(pytyp, pytypes.FuncType): definition = None if isinstance(pytyp, pytypes.StorageProxyType): wtypes.validate_persistable(pytyp.content_wtype, node_loc) match pytyp.generic: case pytypes.GenericLocalStateType: kind = awst_nodes.AppStorageKind.account_local case pytypes.GenericGlobalStateType: kind = awst_nodes.AppStorageKind.app_global case pytypes.GenericBoxType: kind = awst_nodes.AppStorageKind.box case None if pytyp == pytypes.BoxRefType: kind = awst_nodes.AppStorageKind.box case _: raise InternalError(f"unhandled StorageProxyType: {pytyp}", node_loc) elif isinstance(pytyp, pytypes.StorageMapProxyType): wtypes.validate_persistable(pytyp.key_wtype, node_loc) wtypes.validate_persistable(pytyp.content_wtype, node_loc) if pytyp.generic != pytypes.GenericBoxMapType: raise InternalError(f"unhandled StorageMapProxyType: {pytyp}", node_loc) kind = awst_nodes.AppStorageKind.box else: # global state, direct wtype = pytyp.checked_wtype(node_loc) wtypes.validate_persistable(wtype, node_loc) key = awst_nodes.BytesConstant( value=name.encode("utf8"), encoding=awst_nodes.BytesEncoding.utf8, source_location=node_loc, wtype=wtypes.state_key, ) kind = awst_nodes.AppStorageKind.app_global definition = awst_nodes.AppStorageDefinition( source_location=node_loc, member_name=name, kind=kind, storage_wtype=wtype, key_wtype=None, key=key, description=None, ) fragment.add_state( ContractFragmentStorage( member_name=name, 
kind=kind, definition=definition, source_location=node_loc, ) ) def _check_class_abstractness( context: ASTConversionModuleContext, class_def: mypy.nodes.ClassDef ) -> bool: is_abstract = class_def.info.is_abstract # note: we don't support the metaclass= option, so we only need to check for # inheritance of abc.ABC and not metaclass=abc.ABCMeta if is_abstract and not any( base.fullname == "abc.ABC" for base in class_def.info.direct_base_classes() ): context.warning(f"Class {class_def.fullname} is implicitly abstract", class_def) return is_abstract
algorandfoundation/puya
src/puyapy/awst_build/contract.py
Python
NOASSERTION
30,981
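_ContractFragment.resolve_method above resolves a member by checking the fragment's own methods first, then taking the first inheritable match along the method-resolution order. A simplified, self-contained sketch of that lookup (these dataclasses are illustrative stand-ins, not the repo's types):

from dataclasses import dataclass, field


@dataclass
class Method:
    name: str
    inheritable: bool = True


@dataclass
class Fragment:
    methods: dict[str, Method] = field(default_factory=dict)
    mro: list["Fragment"] = field(default_factory=list)

    def resolve_method(self, name: str) -> Method | None:
        if name in self.methods:  # own definitions win
            return self.methods[name]
        for base in self.mro:  # then first inheritable match in MRO order
            method = base.methods.get(name)
            if method is not None and method.inheritable:
                return method
        return None


base = Fragment(methods={"approval_program": Method("approval_program")})
derived = Fragment(mro=[base])
assert derived.resolve_method("approval_program") is base.methods["approval_program"]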
algorandfoundation/puya
src/puyapy/awst_build/eb/__init__.py
Python
NOASSERTION
0
import abc
import typing

import typing_extensions

from puya import log
from puya.awst.nodes import (
    BinaryBooleanOperator,
    CompileTimeConstantExpression,
    Expression,
    Lvalue,
    SingleEvaluation,
    Statement,
    TupleExpression,
    VarExpression,
)
from puya.errors import CodeError, InternalError
from puya.parse import SourceLocation
from puyapy.awst_build import pytypes
from puyapy.awst_build.eb.factories import builder_for_instance
from puyapy.awst_build.eb.interface import (
    BuilderBinaryOp,
    BuilderComparisonOp,
    BuilderUnaryOp,
    CallableBuilder,
    InstanceBuilder,
    NodeBuilder,
    TypeBuilder,
)

__all__ = [
    "FunctionBuilder",
    "GenericTypeBuilder",
    "InstanceExpressionBuilder",
    "NotIterableInstanceExpressionBuilder",
]

_TPyType_co = typing_extensions.TypeVar(
    "_TPyType_co", bound=pytypes.PyType, default=pytypes.PyType, covariant=True
)
_TExpression_co = typing_extensions.TypeVar(
    "_TExpression_co", bound=Expression, default=Expression, covariant=True
)

logger = log.get_logger(__name__)


class FunctionBuilder(CallableBuilder, abc.ABC):
    @property
    @typing.final
    def pytype(self) -> None:  # TODO: give function type
        return None

    @typing.override
    @typing.final
    def bool_eval(self, location: SourceLocation, *, negate: bool = False) -> InstanceBuilder:
        from puyapy.awst_build.eb._utils import constant_bool_and_error

        return constant_bool_and_error(value=True, location=location, negate=negate)

    @typing.override
    @typing.final
    def member_access(self, name: str, location: SourceLocation) -> typing.Never:
        raise CodeError("function attribute access is not supported", location)


class GenericTypeBuilder(CallableBuilder, abc.ABC):  # TODO: can we fold this with TypeBuilder?
    @typing.override
    @property
    def pytype(self) -> None:  # TODO, take this as an init argument
        return None

    @typing.override
    @typing.final
    def member_access(self, name: str, location: SourceLocation) -> NodeBuilder:
        raise CodeError("generic type requires parameters", location)

    @typing.override
    @typing.final
    def bool_eval(self, location: SourceLocation, *, negate: bool = False) -> InstanceBuilder:
        from puyapy.awst_build.eb._utils import constant_bool_and_error

        return constant_bool_and_error(value=True, location=location, negate=negate)


class InstanceExpressionBuilder(
    InstanceBuilder[_TPyType_co], typing.Generic[_TPyType_co, _TExpression_co], abc.ABC
):
    def __init__(self, pytype: _TPyType_co, expr: _TExpression_co):
        super().__init__(expr.source_location)
        if expr.wtype != pytype.wtype:
            raise InternalError(
                f"invalid expression wtype {str(expr.wtype)!r} for Python type {str(pytype)!r}",
                expr.source_location,
            )
        self._pytype = pytype
        self.__expr = expr

    @typing.override
    @typing.final
    @property
    def pytype(self) -> _TPyType_co:
        return self._pytype

    @typing.override
    def resolve_literal(self, converter: TypeBuilder) -> InstanceBuilder:
        return self.try_resolve_literal(converter)

    @typing.override
    def try_resolve_literal(self, converter: TypeBuilder) -> InstanceBuilder:
        return self

    @typing.override
    @typing.final
    def resolve_lvalue(self) -> Lvalue:
        resolved = self.resolve()
        return _validate_lvalue(self._pytype, resolved)

    @typing.override
    def resolve(self) -> _TExpression_co:
        return self.__expr

    @typing.override
    def delete(self, location: SourceLocation) -> Statement:
        from puyapy.awst_build.eb._utils import dummy_statement

        logger.error(f"{self.pytype} is not valid as del target", location=location)
        return dummy_statement(location)

    @typing.override
    def member_access(self, name: str, location: SourceLocation) -> NodeBuilder:
        raise CodeError(f"unrecognised member of {self.pytype}: {name}", location)

    @typing.override
    def unary_op(self, op: BuilderUnaryOp, location: SourceLocation) -> InstanceBuilder:
        raise CodeError(f"{self.pytype} does not support unary {op.value!r} operator", location)

    @typing.override
    def compare(
        self, other: InstanceBuilder, op: BuilderComparisonOp, location: SourceLocation
    ) -> InstanceBuilder:
        return NotImplemented

    @typing.override
    def binary_op(
        self,
        other: InstanceBuilder,
        op: BuilderBinaryOp,
        location: SourceLocation,
        *,
        reverse: bool,
    ) -> InstanceBuilder:
        return NotImplemented

    @typing.override
    def bool_binary_op(
        self, other: InstanceBuilder, op: BinaryBooleanOperator, location: SourceLocation
    ) -> InstanceBuilder:
        return super().bool_binary_op(other, op, location)

    @typing.override
    def augmented_assignment(
        self, op: BuilderBinaryOp, rhs: InstanceBuilder, location: SourceLocation
    ) -> Statement:
        from puyapy.awst_build.eb._utils import dummy_statement

        logger.error(f"{self.pytype} does not support augmented assignment", location=location)
        return dummy_statement(location)

    @typing.override
    def single_eval(self) -> InstanceBuilder:
        if not isinstance(
            self.__expr, VarExpression | CompileTimeConstantExpression | SingleEvaluation
        ):
            return builder_for_instance(self.pytype, SingleEvaluation(self.__expr))
        return self


class NotIterableInstanceExpressionBuilder(InstanceExpressionBuilder[_TPyType_co], abc.ABC):
    @typing.final
    @typing.override
    def contains(self, item: InstanceBuilder, location: SourceLocation) -> InstanceBuilder:
        return super().contains(item, location)

    @typing.final
    @typing.override
    def iterate(self) -> Expression:
        return super().iterate()

    @typing.final
    @typing.override
    def iterable_item_type(self) -> pytypes.PyType:
        return super().iterable_item_type()

    @typing.final
    @typing.override
    def index(self, index: InstanceBuilder, location: SourceLocation) -> InstanceBuilder:
        return super().index(index, location)

    @typing.final
    @typing.override
    def slice_index(
        self,
        begin_index: InstanceBuilder | None,
        end_index: InstanceBuilder | None,
        stride: InstanceBuilder | None,
        location: SourceLocation,
    ) -> InstanceBuilder:
        return super().slice_index(begin_index, end_index, stride, location)


def _validate_lvalue(typ: pytypes.PyType, resolved: Expression) -> Lvalue:
    if typ == pytypes.NoneType:
        raise CodeError(
            "None indicates an empty return and cannot be assigned",
            resolved.source_location,
        )
    if not isinstance(resolved, Lvalue):
        raise CodeError(
            "expression is not valid as an assignment target", resolved.source_location
        )
    if isinstance(resolved, TupleExpression):
        assert isinstance(typ, pytypes.TupleLikeType)
        for item_typ, item in zip(typ.items, resolved.items, strict=True):
            _validate_lvalue(item_typ, item)
    return resolved
algorandfoundation/puya
src/puyapy/awst_build/eb/_base.py
Python
NOASSERTION
7,260
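The `delete` and `augmented_assignment` fallbacks above log an error and substitute a dummy node rather than aborting compilation. A minimal standalone sketch of that recovery pattern follows; all names in it are illustrative, not from the codebase.
# Standalone sketch: report the error, then return a placeholder node so the
# rest of the compilation pass can continue and surface further diagnostics.
import logging

logger = logging.getLogger("sketch")


def build_del_statement(target_is_deletable: bool) -> str:
    if not target_is_deletable:
        logger.error("not valid as del target")
        return "<dummy statement>"  # placeholder keeps the tree well-formed
    return "<delete statement>"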
import abc
import typing
from collections.abc import Sequence

import mypy.nodes
import typing_extensions

from puya.awst.nodes import Expression, ReinterpretCast
from puya.parse import SourceLocation
from puyapy.awst_build import pytypes
from puyapy.awst_build.eb import _expect as expect
from puyapy.awst_build.eb._base import FunctionBuilder, InstanceExpressionBuilder
from puyapy.awst_build.eb._utils import cast_to_bytes
from puyapy.awst_build.eb.bytes import BytesExpressionBuilder
from puyapy.awst_build.eb.factories import builder_for_instance
from puyapy.awst_build.eb.interface import InstanceBuilder, NodeBuilder, TypeBuilder

_TPyType_co = typing_extensions.TypeVar(
    "_TPyType_co", bound=pytypes.PyType, default=pytypes.PyType, covariant=True
)


class BytesBackedTypeBuilder(TypeBuilder[_TPyType_co], abc.ABC):
    @typing.override
    def member_access(self, name: str, location: SourceLocation) -> NodeBuilder:
        typ = self.produces()
        match name:
            case "from_bytes":
                return _FromBytes(typ, location)
            case _:
                return super().member_access(name, location)


class _FromBytes(FunctionBuilder):
    def __init__(self, result_type: pytypes.PyType, location: SourceLocation):
        super().__init__(location)
        self.result_type = result_type

    @typing.override
    def call(
        self,
        args: Sequence[NodeBuilder],
        arg_kinds: list[mypy.nodes.ArgKind],
        arg_names: list[str | None],
        location: SourceLocation,
    ) -> InstanceBuilder:
        arg = expect.exactly_one_arg_of_type_else_dummy(
            args, pytypes.BytesType, location, resolve_literal=True
        )
        result_expr = ReinterpretCast(
            expr=arg.resolve(),
            wtype=self.result_type.checked_wtype(location),
            source_location=location,
        )
        return builder_for_instance(self.result_type, result_expr)


class BytesBackedInstanceExpressionBuilder(InstanceExpressionBuilder[_TPyType_co], abc.ABC):
    _bytes_member: typing.ClassVar[str]

    def __init_subclass__(cls, *, bytes_member: str = "bytes", **kwargs: object):
        super().__init_subclass__(**kwargs)
        cls._bytes_member = bytes_member

    @typing.override
    def member_access(self, name: str, location: SourceLocation) -> NodeBuilder:
        if name == self._bytes_member:
            return self.bytes(location)
        else:
            return super().member_access(name, location)

    def bytes(self, location: SourceLocation) -> BytesExpressionBuilder:
        return BytesExpressionBuilder(self.to_bytes(location))

    @typing.override
    def to_bytes(self, location: SourceLocation) -> Expression:
        return cast_to_bytes(self.resolve(), location)
algorandfoundation/puya
src/puyapy/awst_build/eb/_bytes_backed.py
Python
NOASSERTION
2,775
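A standalone sketch of the `__init_subclass__` hook used by `BytesBackedInstanceExpressionBuilder` above: each subclass picks the member name under which its raw bytes are exposed, with "bytes" as the default. The class names below are illustrative only.
# Standalone sketch: per-subclass configuration via __init_subclass__ keywords.
class BytesBacked:
    _bytes_member: str

    def __init_subclass__(cls, *, bytes_member: str = "bytes", **kwargs: object):
        super().__init_subclass__(**kwargs)
        cls._bytes_member = bytes_member


class StringBacked(BytesBacked):  # inherits the default member name
    pass


class CustomBacked(BytesBacked, bytes_member="raw"):  # overrides it
    pass


assert StringBacked._bytes_member == "bytes"
assert CustomBacked._bytes_member == "raw"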
import typing
from collections.abc import Callable, Sequence
from itertools import zip_longest

from puya import log
from puya.parse import SourceLocation
from puyapy.awst_build import pytypes
from puyapy.awst_build.eb._utils import dummy_value
from puyapy.awst_build.eb.interface import InstanceBuilder, NodeBuilder
from puyapy.awst_build.utils import maybe_resolve_literal

_T = typing.TypeVar("_T")
_TBuilder = typing.TypeVar("_TBuilder", bound=NodeBuilder)

logger = log.get_logger(__name__)


def at_most_one_arg(
    args: Sequence[NodeBuilder], location: SourceLocation
) -> InstanceBuilder | None:
    if not args:
        return None
    first, *rest = args
    if rest:
        logger.error(f"expected at most 1 argument, got {len(args)}", location=location)
    return instance_builder(first, default=default_none)


def at_most_one_arg_of_type(
    args: Sequence[NodeBuilder], valid_types: Sequence[pytypes.PyType], location: SourceLocation
) -> InstanceBuilder | None:
    if not args:
        return None
    first, *rest = args
    if rest:
        logger.error(f"expected at most 1 argument, got {len(args)}", location=location)
    if isinstance(first, InstanceBuilder) and first.pytype.is_type_or_subtype(*valid_types):
        return first
    return not_this_type(first, default=default_none)


def default_raise(msg: str, location: SourceLocation) -> typing.Never:
    from puya.errors import CodeError

    raise CodeError(msg, location)


def default_fixed_value(value: _T) -> Callable[[str, SourceLocation], _T]:
    def defaulter(msg: str, location: SourceLocation) -> _T:  # noqa: ARG001
        return value

    return defaulter


default_none: typing.Final = default_fixed_value(None)


def default_dummy_value(
    pytype: pytypes.PyType,
) -> Callable[[str, SourceLocation], InstanceBuilder]:
    def defaulter(msg: str, location: SourceLocation) -> InstanceBuilder:  # noqa: ARG001
        return dummy_value(pytype, location)

    return defaulter


def not_this_type(node: NodeBuilder, default: Callable[[str, SourceLocation], _T]) -> _T:
    """Provide consistent error messages for unexpected types."""
    if isinstance(node.pytype, pytypes.UnionType):
        msg = "type unions are unsupported at this location"
    else:
        msg = "unexpected argument type"
    result = default(msg, node.source_location)
    logger.error(msg, location=node.source_location)
    return result


def at_least_one_arg(
    args: Sequence[_TBuilder],
    location: SourceLocation,
    *,
    default: Callable[[str, SourceLocation], _T],
) -> tuple[InstanceBuilder | _T, Sequence[_TBuilder]]:
    if not args:
        msg = "expected at least 1 argument, got 0"
        result = default(msg, location)
        logger.error(msg, location=location)
        return result, []
    first, *rest = args
    return instance_builder(first, default=default), rest


def exactly_one_arg(
    args: Sequence[NodeBuilder],
    location: SourceLocation,
    *,
    default: Callable[[str, SourceLocation], _T],
) -> InstanceBuilder | _T:
    if not args:
        msg = "expected 1 argument, got 0"
        result = default(msg, location)
        logger.error(msg, location=location)
        return result
    first, *rest = args
    if rest:
        logger.error(f"expected 1 argument, got {len(args)}", location=location)
    return instance_builder(first, default=default)


def exactly_one_arg_of_type(
    args: Sequence[NodeBuilder],
    expected: pytypes.PyType,
    location: SourceLocation,
    *,
    default: Callable[[str, SourceLocation], _T],
    resolve_literal: bool = False,
) -> InstanceBuilder | _T:
    if not args:
        msg = "expected 1 argument, got 0"
        result = default(msg, location)
        logger.error(msg, location=location)
        return result
    first, *rest = args
    if rest:
        logger.error(f"expected 1 argument, got {len(args)}", location=location)
    if resolve_literal:
        first = maybe_resolve_literal(first, expected)
    if isinstance(first, InstanceBuilder) and expected <= first.pytype:
        return first
    return not_this_type(first, default=default)


def exactly_one_arg_of_type_else_dummy(
    args: Sequence[NodeBuilder],
    pytype: pytypes.PyType,
    location: SourceLocation,
    *,
    resolve_literal: bool = False,
) -> InstanceBuilder:
    return exactly_one_arg_of_type(
        args,
        pytype,
        location,
        default=default_dummy_value(pytype),
        resolve_literal=resolve_literal,
    )


def no_args(args: Sequence[NodeBuilder], location: SourceLocation) -> None:
    if args:
        logger.error(f"expected 0 arguments, got {len(args)}", location=location)


def exactly_n_args_of_type_else_dummy(
    args: Sequence[NodeBuilder], pytype: pytypes.PyType, location: SourceLocation, num_args: int
) -> Sequence[InstanceBuilder]:
    if not exactly_n_args(args, location, num_args):
        dummy_args = [dummy_value(pytype, location)] * num_args
        args = [arg or default for arg, default in zip_longest(args, dummy_args)]
    arg_ebs = [argument_of_type_else_dummy(arg, pytype) for arg in args]
    return arg_ebs[:num_args]


def exactly_n_args(args: Sequence[NodeBuilder], location: SourceLocation, num_args: int) -> bool:
    if len(args) == num_args:
        return True
    logger.error(
        f"expected {num_args} argument{'' if num_args == 1 else 's'}, got {len(args)}",
        location=location,
    )
    return False


def argument_of_type(
    builder: NodeBuilder,
    target_type: pytypes.PyType,
    *additional_types: pytypes.PyType,
    resolve_literal: bool = False,
    default: Callable[[str, SourceLocation], _T],
) -> InstanceBuilder | _T:
    if resolve_literal:
        builder = maybe_resolve_literal(builder, target_type)
    if isinstance(builder, InstanceBuilder) and builder.pytype.is_type_or_subtype(
        target_type, *additional_types
    ):
        return builder
    return not_this_type(builder, default=default)


def argument_of_type_else_dummy(
    builder: NodeBuilder,
    target_type: pytypes.PyType,
    *additional_types: pytypes.PyType,
    resolve_literal: bool = False,
) -> InstanceBuilder:
    return argument_of_type(
        builder,
        target_type,
        *additional_types,
        resolve_literal=resolve_literal,
        default=default_dummy_value(target_type),
    )


def simple_string_literal(
    builder: NodeBuilder,
    *,
    default: Callable[[str, SourceLocation], _T],
) -> str | _T:
    from puyapy.awst_build.eb.interface import LiteralBuilder

    match builder:
        case LiteralBuilder(value=str(value)):
            return value
        case InstanceBuilder(pytype=pytypes.StrLiteralType):
            msg = "argument must be a simple str literal"
            result = default(msg, builder.source_location)
            logger.error(msg, location=builder.source_location)
            return result
        case other:
            return not_this_type(other, default=default)


def instance_builder(
    builder: NodeBuilder, *, default: Callable[[str, SourceLocation], _T]
) -> InstanceBuilder | _T:
    if isinstance(builder, InstanceBuilder):
        return builder
    msg = "expression is not a value"
    result = default(msg, builder.source_location)
    logger.error(msg, location=builder.source_location)
    return result
algorandfoundation/puya
src/puyapy/awst_build/eb/_expect.py
Python
NOASSERTION
7,348
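A standalone sketch of the `default=` strategy the helpers above share: the caller decides whether a failed argument check raises or substitutes a fallback value. All names below are illustrative, not from the codebase.
# Standalone sketch: pluggable failure handling via a `default` callback.
import typing
from collections.abc import Callable

_T = typing.TypeVar("_T")


def default_raise(msg: str) -> typing.Never:
    raise ValueError(msg)


def default_fixed_value(value: _T) -> Callable[[str], _T]:
    def defaulter(msg: str) -> _T:  # message deliberately ignored
        return value

    return defaulter


def exactly_one_arg(args: list[int], *, default: Callable[[str], _T]) -> int | _T:
    if len(args) != 1:
        return default(f"expected 1 argument, got {len(args)}")
    return args[0]


assert exactly_one_arg([42], default=default_raise) == 42
assert exactly_one_arg([], default=default_fixed_value(None)) is None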
import typing

from puya import log
from puya.awst.nodes import (
    BinaryBooleanOperator,
    BoolConstant,
    BytesConstant,
    BytesEncoding,
    Expression,
    Lvalue,
    Statement,
)
from puya.errors import CodeError
from puya.parse import SourceLocation
from puyapy.awst_build import intrinsic_factory, pytypes
from puyapy.awst_build.eb.interface import (
    BuilderBinaryOp,
    BuilderComparisonOp,
    BuilderUnaryOp,
    InstanceBuilder,
    LiteralBuilder,
    TypeBuilder,
)
from puyapy.awst_build.utils import fold_binary_expr, fold_unary_expr
from puyapy.models import ConstantValue

logger = log.get_logger(__name__)


class LiteralBuilderImpl(LiteralBuilder):
    def __init__(self, value: ConstantValue, source_location: SourceLocation):
        super().__init__(source_location)
        self._value = value
        match value:
            case bool():
                typ: pytypes.PyType = pytypes.BoolType
            case int():
                typ = pytypes.IntLiteralType
            case str():
                typ = pytypes.StrLiteralType
            case bytes():
                typ = pytypes.BytesLiteralType
            case _:
                typing.assert_never(value)
        self._pytype = typ

    @property
    def value(self) -> ConstantValue:
        return self._value

    @property
    @typing.override
    def pytype(self) -> pytypes.PyType:
        return self._pytype

    @typing.override
    def resolve(self) -> Expression:
        if isinstance(self.value, bool):
            return BoolConstant(value=self.value, source_location=self.source_location)
        raise CodeError("a Python literal is not valid at this location", self.source_location)

    @typing.override
    def resolve_literal(self, converter: TypeBuilder) -> InstanceBuilder:
        return converter.convert_literal(literal=self, location=converter.source_location)

    @typing.override
    def try_resolve_literal(self, converter: TypeBuilder) -> InstanceBuilder | None:
        return converter.try_convert_literal(literal=self, location=converter.source_location)

    @typing.override
    def resolve_lvalue(self) -> Lvalue:
        raise CodeError("cannot assign to literal", self.source_location)

    @typing.override
    def to_bytes(self, location: SourceLocation) -> Expression:
        match self.value:
            case str(str_value):
                return BytesConstant(
                    value=str_value.encode(), encoding=BytesEncoding.utf8, source_location=location
                )
            case bytes(bytes_value):
                return BytesConstant(
                    value=bytes_value, encoding=BytesEncoding.unknown, source_location=location
                )
            case bool():
                return intrinsic_factory.itob(self.resolve(), location)
        raise CodeError(f"cannot serialize literal of type {self.pytype}", location)

    @typing.override
    def delete(self, location: SourceLocation) -> Statement:
        raise CodeError("cannot delete literal", location)

    @typing.override
    def unary_op(self, op: BuilderUnaryOp, location: SourceLocation) -> LiteralBuilder:
        folded = fold_unary_expr(location, op.value, self.value)
        return LiteralBuilderImpl(value=folded, source_location=location)

    @typing.override
    def compare(
        self, other: InstanceBuilder, op: BuilderComparisonOp, location: SourceLocation
    ) -> LiteralBuilder:
        if not isinstance(other, LiteralBuilder):
            return NotImplemented
        folded = fold_binary_expr(location, op.value, self.value, other.value)
        return LiteralBuilderImpl(value=folded, source_location=location)

    @typing.override
    def binary_op(
        self,
        other: InstanceBuilder,
        op: BuilderBinaryOp,
        location: SourceLocation,
        *,
        reverse: bool,
    ) -> LiteralBuilder:
        if not isinstance(other, LiteralBuilder):
            return NotImplemented
        lhs, rhs = self.value, other.value
        if reverse:
            lhs, rhs = rhs, lhs
        folded = fold_binary_expr(location, op.value, lhs, rhs)
        return LiteralBuilderImpl(value=folded, source_location=location)

    @typing.override
    def bool_binary_op(
        self, other: InstanceBuilder, op: BinaryBooleanOperator, location: SourceLocation
    ) -> InstanceBuilder:
        if not isinstance(other, LiteralBuilder):
            return super().bool_binary_op(other, op, location)
        folded = fold_binary_expr(location, op.value, self.value, other.value)
        return LiteralBuilderImpl(value=folded, source_location=location)

    @typing.override
    def augmented_assignment(
        self, op: BuilderBinaryOp, rhs: InstanceBuilder, location: SourceLocation
    ) -> Statement:
        raise CodeError("cannot assign to literal", location)

    @typing.override
    def contains(self, item: InstanceBuilder, location: SourceLocation) -> LiteralBuilder:
        if not isinstance(item, LiteralBuilder):
            raise CodeError("cannot perform containment check with non-constant value", location)
        try:
            folded = item.value in self.value  # type: ignore[operator]
        except Exception as ex:
            raise CodeError(str(ex), location) from ex
        return LiteralBuilderImpl(value=folded, source_location=location)

    @typing.override
    def iterate(self) -> typing.Never:
        raise CodeError("cannot iterate literal")

    @typing.override
    def iterable_item_type(self) -> typing.Never:
        self.iterate()

    @typing.override
    def index(self, index: InstanceBuilder, location: SourceLocation) -> LiteralBuilder:
        if not isinstance(index, LiteralBuilder):
            raise CodeError("cannot index literal with non-constant value", location)
        try:
            folded = self.value[index.value]  # type: ignore[index]
        except Exception as ex:
            raise CodeError(str(ex), location) from ex
        return LiteralBuilderImpl(value=folded, source_location=location)

    @typing.override
    def slice_index(
        self,
        begin_index: InstanceBuilder | None,
        end_index: InstanceBuilder | None,
        stride: InstanceBuilder | None,
        location: SourceLocation,
    ) -> LiteralBuilder:
        def _constant_slice_arg(index: InstanceBuilder | None) -> ConstantValue | None:
            if index is None:
                return None
            if not isinstance(index, LiteralBuilder):
                raise CodeError("cannot slice literal with non-constant value", location)
            return index.value

        begin = _constant_slice_arg(begin_index)
        end = _constant_slice_arg(end_index)
        stride_ = _constant_slice_arg(stride)
        try:
            folded = self.value[begin:end:stride_]  # type: ignore[index,misc]
        except Exception as ex:
            raise CodeError(str(ex), location) from ex
        return LiteralBuilderImpl(value=folded, source_location=location)

    @typing.override
    def member_access(self, name: str, location: SourceLocation) -> LiteralBuilder:
        # TODO: support stuff like int.from_bytes etc
        raise CodeError("unsupported member access from literal", location)

    @typing.override
    def bool_eval(self, location: SourceLocation, *, negate: bool = False) -> InstanceBuilder:
        if isinstance(self.value, bool):
            value = self.value
            warn = False
        else:
            value = bool(self.value)
            warn = True
        if negate:
            value = not value
        if warn:
            logger.warning(f"expression is always {value}", location=location)
        return LiteralBuilderImpl(value=value, source_location=location)

    @typing.override
    def single_eval(self) -> InstanceBuilder:
        return self
algorandfoundation/puya
src/puyapy/awst_build/eb/_literals.py
Python
NOASSERTION
7,840
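A standalone sketch of the compile-time folding `LiteralBuilderImpl` performs above: literal-only operations are evaluated with ordinary Python semantics, and any failure is converted into a compile error rather than a compiler crash. The helper name below is illustrative.
# Standalone sketch: fold a literal operation, turning runtime failures into
# a diagnostic instead of letting them propagate out of the compiler.
def fold_contains(container: bytes, item: bytes) -> bool:
    try:
        return item in container
    except TypeError as ex:
        raise SyntaxError(f"invalid literal operation: {ex}") from ex


assert fold_contains(b"abcdef", b"cd") is True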
import functools
from collections.abc import Callable

from puya.awst.nodes import Expression
from puya.errors import CodeError, InternalError
from puya.parse import SourceLocation
from puyapy.awst_build import constants, intrinsic_data, pytypes
from puyapy.awst_build.eb import (
    arc4,
    array,
    biguint,
    bool as bool_,
    bytes as bytes_,
    compiled,
    ensure_budget,
    intrinsics,
    log,
    none,
    storage,
    string,
    struct,
    template_variables,
    transaction,
    tuple as tuple_,
    uint64,
    uint64_enums,
    unsigned_builtins,
)
from puyapy.awst_build.eb.interface import CallableBuilder, InstanceBuilder
from puyapy.awst_build.eb.reference_types import account, application, asset

__all__ = [
    "builder_for_instance",
    "builder_for_type",
]

CallableBuilderFromSourceFactory = Callable[[SourceLocation], CallableBuilder]

FUNC_NAME_TO_BUILDER: dict[str, CallableBuilderFromSourceFactory] = {
    "algopy.arc4.arc4_signature": intrinsics.Arc4SignatureBuilder,
    "algopy._util.ensure_budget": ensure_budget.EnsureBudgetBuilder,
    "algopy._util.log": log.LogBuilder,
    "algopy.arc4.emit": arc4.EmitBuilder,
    "algopy.itxn.submit_txns": transaction.SubmitInnerTransactionExpressionBuilder,
    "algopy._compiled.compile_contract": compiled.CompileContractFunctionBuilder,
    "algopy._compiled.compile_logicsig": compiled.CompileLogicSigFunctionBuilder,
    "algopy.arc4.arc4_create": arc4.ARC4CreateFunctionBuilder,
    "algopy.arc4.arc4_update": arc4.ARC4UpdateFunctionBuilder,
    constants.CLS_ARC4_ABI_CALL: arc4.ABICallGenericTypeBuilder,
    "algopy._template_variables.TemplateVar": (
        template_variables.GenericTemplateVariableExpressionBuilder
    ),
    **{
        (fullname := "".join((constants.ALGOPY_OP_PREFIX, name))): functools.partial(
            intrinsics.IntrinsicFunctionExpressionBuilder, fullname, mappings
        )
        for name, mappings in intrinsic_data.FUNC_TO_AST_MAPPER.items()
    },
}

PYTYPE_TO_TYPE_BUILDER: dict[pytypes.PyType, CallableBuilderFromSourceFactory] = {
    pytypes.NoneType: none.NoneTypeBuilder,
    pytypes.BoolType: bool_.BoolTypeBuilder,
    pytypes.GenericTupleType: tuple_.GenericTupleTypeBuilder,
    pytypes.reversedGenericType: unsigned_builtins.ReversedFunctionExpressionBuilder,
    pytypes.urangeType: unsigned_builtins.UnsignedRangeBuilder,
    pytypes.uenumerateGenericType: unsigned_builtins.UnsignedEnumerateBuilder,
    pytypes.OpUpFeeSourceType: uint64_enums.OpUpFeeSourceTypeBuilder,
    pytypes.GenericBoxType: storage.BoxGenericTypeExpressionBuilder,
    pytypes.BoxRefType: storage.BoxRefTypeBuilder,
    pytypes.GenericBoxMapType: storage.BoxMapGenericTypeExpressionBuilder,
    pytypes.GenericLocalStateType: storage.LocalStateGenericTypeBuilder,
    pytypes.GenericGlobalStateType: storage.GlobalStateGenericTypeBuilder,
    pytypes.ARC4AddressType: arc4.AddressTypeBuilder,
    pytypes.ARC4BoolType: arc4.ARC4BoolTypeBuilder,
    pytypes.ARC4ByteType: functools.partial(arc4.UIntNTypeBuilder, pytypes.ARC4ByteType),
    pytypes.GenericARC4DynamicArrayType: arc4.DynamicArrayGenericTypeBuilder,
    pytypes.GenericARC4StaticArrayType: arc4.StaticArrayGenericTypeBuilder,
    pytypes.ARC4StringType: arc4.ARC4StringTypeBuilder,
    pytypes.GenericARC4TupleType: arc4.ARC4TupleGenericTypeBuilder,
    pytypes.ARC4DynamicBytesType: arc4.DynamicBytesTypeBuilder,
    pytypes.AccountType: account.AccountTypeBuilder,
    pytypes.GenericArrayType: array.ArrayGenericTypeBuilder,
    pytypes.AssetType: asset.AssetTypeBuilder,
    pytypes.ApplicationType: application.ApplicationTypeBuilder,
    pytypes.BigUIntType: biguint.BigUIntTypeBuilder,
    pytypes.BytesType: bytes_.BytesTypeBuilder,
    pytypes.StringType: string.StringTypeBuilder,
    pytypes.UInt64Type: uint64.UInt64TypeBuilder,
    pytypes.TransactionTypeType: uint64_enums.TransactionTypeTypeBuilder,
    pytypes.OnCompleteActionType: uint64_enums.OnCompletionActionTypeBuilder,
    **{
        op_enum_typ: functools.partial(intrinsics.IntrinsicEnumTypeBuilder, op_enum_typ)
        for op_enum_typ in pytypes.OpEnumTypes
    },
    **{
        op_namespace_typ: functools.partial(
            intrinsics.IntrinsicNamespaceTypeBuilder, op_namespace_typ
        )
        for op_namespace_typ in pytypes.OpNamespaceTypes
    },
    **{
        gtxn_pytyp: functools.partial(transaction.GroupTransactionTypeBuilder, gtxn_pytyp)
        for gtxn_pytyp in (
            pytypes.GroupTransactionBaseType,
            *pytypes.GroupTransactionTypes.values(),
        )
    },
    **{
        itxn_fieldset_pytyp: functools.partial(
            transaction.InnerTxnParamsTypeBuilder, itxn_fieldset_pytyp
        )
        for itxn_fieldset_pytyp in pytypes.InnerTransactionFieldsetTypes.values()
    },
    **{
        itxn_result_pytyp: functools.partial(
            transaction.InnerTransactionTypeBuilder, itxn_result_pytyp
        )
        for itxn_result_pytyp in pytypes.InnerTransactionResultTypes.values()
    },
}

CallableBuilderFromPyTypeAndSourceFactory = Callable[
    [pytypes.PyType, SourceLocation], CallableBuilder
]

PYTYPE_GENERIC_TO_TYPE_BUILDER: dict[
    pytypes.PyType | None, CallableBuilderFromPyTypeAndSourceFactory
] = {
    pytypes.uenumerateGenericType: (
        # TODO: fixme, should accept type
        lambda _, loc: unsigned_builtins.UnsignedEnumerateBuilder(loc)
    ),
    pytypes.reversedGenericType: (
        # TODO: fixme, should accept type
        lambda _, loc: unsigned_builtins.ReversedFunctionExpressionBuilder(loc)
    ),
    pytypes.GenericTemplateVarType: template_variables.TemplateVariableExpressionBuilder,
    pytypes.GenericABICallWithReturnType: arc4.ABICallTypeBuilder,
    pytypes.GenericLocalStateType: storage.LocalStateTypeBuilder,
    pytypes.GenericGlobalStateType: storage.GlobalStateTypeBuilder,
    pytypes.GenericBoxType: storage.BoxTypeBuilder,
    pytypes.GenericBoxMapType: storage.BoxMapTypeBuilder,
    pytypes.GenericARC4TupleType: arc4.ARC4TupleTypeBuilder,
    pytypes.GenericTupleType: tuple_.TupleTypeBuilder,
    pytypes.GenericArrayType: array.ArrayTypeBuilder,
    pytypes.GenericARC4UFixedNxMType: arc4.UFixedNxMTypeBuilder,
    pytypes.GenericARC4BigUFixedNxMType: arc4.UFixedNxMTypeBuilder,
    pytypes.GenericARC4UIntNType: arc4.UIntNTypeBuilder,
    pytypes.GenericARC4BigUIntNType: arc4.UIntNTypeBuilder,
    pytypes.GenericARC4DynamicArrayType: arc4.DynamicArrayTypeBuilder,
    pytypes.GenericARC4StaticArrayType: arc4.StaticArrayTypeBuilder,
}

PYTYPE_BASE_TO_TYPE_BUILDER: dict[pytypes.PyType, CallableBuilderFromPyTypeAndSourceFactory] = {
    pytypes.ARC4StructBaseType: arc4.ARC4StructTypeBuilder,
    pytypes.StructBaseType: struct.StructSubclassExpressionBuilder,
    pytypes.NamedTupleBaseType: tuple_.NamedTupleTypeBuilder,
}

PYTYPE_TO_BUILDER: dict[pytypes.PyType, Callable[[Expression], InstanceBuilder]] = {
    pytypes.ARC4BoolType: arc4.ARC4BoolExpressionBuilder,
    pytypes.ARC4StringType: arc4.ARC4StringExpressionBuilder,
    pytypes.ARC4DynamicBytesType: arc4.DynamicBytesExpressionBuilder,
    pytypes.ARC4ByteType: functools.partial(
        arc4.UIntNExpressionBuilder, typ=pytypes.ARC4ByteType
    ),
    pytypes.ARC4AddressType: arc4.AddressExpressionBuilder,
    pytypes.AccountType: account.AccountExpressionBuilder,
    pytypes.ApplicationType: application.ApplicationExpressionBuilder,
    pytypes.AssetType: asset.AssetExpressionBuilder,
    pytypes.BigUIntType: biguint.BigUIntExpressionBuilder,
    pytypes.BoolType: bool_.BoolExpressionBuilder,
    pytypes.BytesType: bytes_.BytesExpressionBuilder,
    pytypes.CompiledContractType: compiled.CompiledContractExpressionBuilder,
    pytypes.CompiledLogicSigType: compiled.CompiledLogicSigExpressionBuilder,
    pytypes.StringType: string.StringExpressionBuilder,
    pytypes.UInt64Type: uint64.UInt64ExpressionBuilder,
    pytypes.NoneType: none.NoneExpressionBuilder,
    pytypes.NeverType: none.NoneExpressionBuilder,  # we treat Never as None/void synonym for now
    pytypes.BoxRefType: storage.BoxRefProxyExpressionBuilder,
    # bound
    **{
        uint64_enum_typ: functools.partial(
            uint64.UInt64ExpressionBuilder, enum_type=uint64_enum_typ
        )
        for uint64_enum_typ in (
            pytypes.OpUpFeeSourceType,
            pytypes.TransactionTypeType,
            pytypes.OnCompleteActionType,
        )
    },
    **{
        gtxn_pytyp: functools.partial(
            transaction.GroupTransactionExpressionBuilder, typ=gtxn_pytyp
        )
        for gtxn_pytyp in (
            pytypes.GroupTransactionBaseType,
            *pytypes.GroupTransactionTypes.values(),
        )
    },
    **{
        itxn_fieldset_pytyp: functools.partial(
            transaction.InnerTxnParamsExpressionBuilder, itxn_fieldset_pytyp
        )
        for itxn_fieldset_pytyp in pytypes.InnerTransactionFieldsetTypes.values()
    },
    **{
        itxn_result_pytyp: functools.partial(
            transaction.InnerTransactionExpressionBuilder, typ=itxn_result_pytyp
        )
        for itxn_result_pytyp in pytypes.InnerTransactionResultTypes.values()
    },
}

InstanceBuilderFromExpressionAndPyTypeFactory = Callable[
    [Expression, pytypes.PyType], InstanceBuilder
]

PYTYPE_GENERIC_TO_BUILDER: dict[
    pytypes.PyType | None, InstanceBuilderFromExpressionAndPyTypeFactory
] = {
    pytypes.GenericTupleType: tuple_.TupleExpressionBuilder,
    pytypes.GenericBoxType: storage.BoxProxyExpressionBuilder,
    pytypes.GenericBoxMapType: storage.BoxMapProxyExpressionBuilder,
    pytypes.GenericArrayType: array.ArrayExpressionBuilder,
    pytypes.GenericARC4DynamicArrayType: arc4.DynamicArrayExpressionBuilder,
    pytypes.GenericARC4StaticArrayType: arc4.StaticArrayExpressionBuilder,
    pytypes.GenericARC4TupleType: arc4.ARC4TupleExpressionBuilder,
    pytypes.GenericARC4UFixedNxMType: arc4.UFixedNxMExpressionBuilder,
    pytypes.GenericARC4BigUFixedNxMType: arc4.UFixedNxMExpressionBuilder,
    pytypes.GenericARC4UIntNType: arc4.UIntNExpressionBuilder,
    pytypes.GenericARC4BigUIntNType: arc4.UIntNExpressionBuilder,
    pytypes.GenericGlobalStateType: storage.GlobalStateExpressionBuilder,
    pytypes.GenericLocalStateType: storage.LocalStateExpressionBuilder,
}

PYTYPE_BASE_TO_BUILDER: dict[pytypes.PyType, InstanceBuilderFromExpressionAndPyTypeFactory] = {
    pytypes.ARC4StructBaseType: arc4.ARC4StructExpressionBuilder,
    pytypes.StructBaseType: struct.StructExpressionBuilder,
    pytypes.NamedTupleBaseType: tuple_.TupleExpressionBuilder,
}


def builder_for_instance(pytyp: pytypes.PyType, expr: Expression) -> InstanceBuilder:
    if eb := PYTYPE_TO_BUILDER.get(pytyp):
        return eb(expr)
    if eb_param_generic := PYTYPE_GENERIC_TO_BUILDER.get(pytyp.generic):
        return eb_param_generic(expr, pytyp)
    for base in pytyp.mro:
        if eb_base := PYTYPE_BASE_TO_BUILDER.get(base):
            return eb_base(expr, pytyp)
    if isinstance(pytyp, pytypes.UnionType):
        raise CodeError("type unions are unsupported at this location", expr.source_location)
    raise InternalError(f"no builder for instance: {pytyp}", expr.source_location)


def builder_for_type(pytyp: pytypes.PyType, expr_loc: SourceLocation) -> CallableBuilder:
    if tb := PYTYPE_TO_TYPE_BUILDER.get(pytyp):
        return tb(expr_loc)
    if tb_param_generic := PYTYPE_GENERIC_TO_TYPE_BUILDER.get(pytyp.generic):
        return tb_param_generic(pytyp, expr_loc)
    for base in pytyp.mro:
        if tb_base := PYTYPE_BASE_TO_TYPE_BUILDER.get(base):
            return tb_base(pytyp, expr_loc)
    if isinstance(pytyp, pytypes.UnionType):
        raise CodeError("type unions are unsupported at this location", expr_loc)
    raise InternalError(f"no builder for type: {pytyp}", expr_loc)
algorandfoundation/puya
src/puyapy/awst_build/eb/_type_registry.py
Python
NOASSERTION
11,785
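A standalone sketch of the three-stage lookup that `builder_for_instance` and `builder_for_type` above share: exact type first, then the type's generic, then each MRO base. Everything below is illustrative, not from the codebase.
# Standalone sketch: registry lookup cascade (exact -> generic -> MRO base).
from dataclasses import dataclass


@dataclass(frozen=True)
class Ty:
    name: str
    generic: "Ty | None" = None
    mro: tuple["Ty", ...] = ()


BASE = Ty("Base")
LIST_GENERIC = Ty("list")
INT_LIST = Ty("list[int]", generic=LIST_GENERIC, mro=(BASE,))

EXACT: dict[Ty, str] = {}
GENERIC = {LIST_GENERIC: "list-builder"}
BASES = {BASE: "base-builder"}


def lookup(typ: Ty) -> str:
    if builder := EXACT.get(typ):
        return builder
    if builder := GENERIC.get(typ.generic):
        return builder
    for base in typ.mro:
        if builder := BASES.get(base):
            return builder
    raise LookupError(typ.name)


assert lookup(INT_LIST) == "list-builder"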
from puya import log
from puya.awst import wtypes
from puya.awst.nodes import (
    BytesComparisonExpression,
    EqualityComparison,
    Expression,
    ExpressionStatement,
    ReinterpretCast,
    Statement,
    VarExpression,
)
from puya.parse import SourceLocation
from puyapy.awst_build import pytypes
from puyapy.awst_build.eb.factories import builder_for_instance
from puyapy.awst_build.eb.interface import (
    BuilderBinaryOp,
    BuilderComparisonOp,
    BuilderUnaryOp,
    InstanceBuilder,
    LiteralBuilder,
)

logger = log.get_logger(__name__)


def dummy_value(pytype: pytypes.PyType, location: SourceLocation) -> InstanceBuilder:
    if isinstance(pytype, pytypes.LiteralOnlyType):
        from puyapy.awst_build.eb._literals import LiteralBuilderImpl

        return LiteralBuilderImpl(pytype.python_type(), location)
    expr = VarExpression(name="", wtype=pytype.checked_wtype(location), source_location=location)
    return builder_for_instance(pytype, expr)


def dummy_statement(location: SourceLocation) -> Statement:
    return ExpressionStatement(
        VarExpression(
            name="",
            wtype=wtypes.void_wtype,
            source_location=location,
        )
    )


def resolve_negative_literal_index(
    index: InstanceBuilder, length: InstanceBuilder, location: SourceLocation
) -> InstanceBuilder:
    match index:
        case LiteralBuilder(value=int(int_index)) if int_index < 0:
            return length.binary_op(
                index.unary_op(BuilderUnaryOp.negative, location),
                BuilderBinaryOp.sub,
                location,
                reverse=False,
            )
        case _:
            from puyapy.awst_build.eb.uint64 import UInt64TypeBuilder

            return index.resolve_literal(UInt64TypeBuilder(index.source_location))


def constant_bool_and_error(
    *, value: bool, location: SourceLocation, negate: bool = False
) -> InstanceBuilder:
    """
    Returns a constant bool instance builder for the specified value and negate combination.

    Always emits an error, as either allowing the expression would result in a semantic
    compatibility issue, or it indicates the user has most likely made a mistake.
    """
    from puyapy.awst_build.eb._literals import LiteralBuilderImpl

    if negate:
        value = not value
    logger.error(f"expression is always {value}", location=location)
    return LiteralBuilderImpl(value=value, source_location=location)


def compare_bytes(
    *,
    self: InstanceBuilder,
    op: BuilderComparisonOp,
    other: InstanceBuilder,
    source_location: SourceLocation,
) -> InstanceBuilder:
    # defer to most derived type if not equal
    if not (other.pytype <= self.pytype):
        return NotImplemented
    return _compare_expr_bytes_unchecked(self.resolve(), op, other.resolve(), source_location)


def compare_expr_bytes(
    *,
    lhs: Expression,
    op: BuilderComparisonOp,
    rhs: Expression,
    source_location: SourceLocation,
) -> InstanceBuilder:
    if rhs.wtype != lhs.wtype:
        return NotImplemented
    return _compare_expr_bytes_unchecked(lhs, op, rhs, source_location)


def _compare_expr_bytes_unchecked(
    lhs: Expression,
    op: BuilderComparisonOp,
    rhs: Expression,
    location: SourceLocation,
) -> InstanceBuilder:
    from puyapy.awst_build.eb.bool import BoolExpressionBuilder

    try:
        eq_op = EqualityComparison(op.value)
    except ValueError:
        return NotImplemented
    cmp_expr = BytesComparisonExpression(
        lhs=lhs,
        operator=eq_op,
        rhs=rhs,
        source_location=location,
    )
    return BoolExpressionBuilder(cmp_expr)


def cast_to_bytes(expr: Expression, location: SourceLocation | None = None) -> ReinterpretCast:
    return ReinterpretCast(
        expr=expr, wtype=wtypes.bytes_wtype, source_location=location or expr.source_location
    )
algorandfoundation/puya
src/puyapy/awst_build/eb/_utils.py
Python
NOASSERTION
3,879
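A standalone sketch of `resolve_negative_literal_index` above: indices resolve to unsigned 64-bit values on the target VM, so a negative constant index is rewritten as `length - abs(index)`, while anything else is resolved as a UInt64. The helper name below is illustrative.
# Standalone sketch: rewrite a negative constant index in terms of the length.
def rewrite_index(index: int, length_expr: str) -> str:
    if index < 0:
        return f"({length_expr} - {-index})"
    return str(index)


assert rewrite_index(-1, "len(arr)") == "(len(arr) - 1)"
assert rewrite_index(2, "len(arr)") == "2"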
# ruff: noqa: F403
from puyapy.awst_build.eb.arc4.abi_call import *
from puyapy.awst_build.eb.arc4.address import *
from puyapy.awst_build.eb.arc4.bool import *
from puyapy.awst_build.eb.arc4.dynamic_array import *
from puyapy.awst_build.eb.arc4.dynamic_bytes import *
from puyapy.awst_build.eb.arc4.emit import *
from puyapy.awst_build.eb.arc4.static_array import *
from puyapy.awst_build.eb.arc4.string import *
from puyapy.awst_build.eb.arc4.struct import *
from puyapy.awst_build.eb.arc4.tuple import *
from puyapy.awst_build.eb.arc4.ufixed import *
from puyapy.awst_build.eb.arc4.uint import *
algorandfoundation/puya
src/puyapy/awst_build/eb/arc4/__init__.py
Python
NOASSERTION
599
import abc
import typing
from abc import ABC
from collections.abc import Sequence

import mypy.nodes
import typing_extensions

from puya import log
from puya.awst.nodes import (
    BytesConstant,
    BytesEncoding,
    CheckedMaybe,
    Copy,
    Expression,
    IndexExpression,
)
from puya.errors import CodeError
from puya.parse import SourceLocation
from puyapy.awst_build import intrinsic_factory, pytypes
from puyapy.awst_build.eb import _expect as expect
from puyapy.awst_build.eb._base import FunctionBuilder
from puyapy.awst_build.eb._bytes_backed import (
    BytesBackedInstanceExpressionBuilder,
    BytesBackedTypeBuilder,
)
from puyapy.awst_build.eb._utils import (
    compare_bytes,
    compare_expr_bytes,
    dummy_value,
    resolve_negative_literal_index,
)
from puyapy.awst_build.eb.factories import builder_for_instance
from puyapy.awst_build.eb.interface import BuilderComparisonOp, InstanceBuilder, NodeBuilder

logger = log.get_logger(__name__)

_TPyType_co = typing_extensions.TypeVar(
    "_TPyType_co", bound=pytypes.PyType, default=pytypes.PyType, covariant=True
)


class ARC4TypeBuilder(BytesBackedTypeBuilder[_TPyType_co], abc.ABC):
    def member_access(self, name: str, location: SourceLocation) -> NodeBuilder:
        match name:
            case "from_log":
                return ARC4FromLogBuilder(location, self.produces())
            case _:
                return super().member_access(name, location)


class ARC4FromLogBuilder(FunctionBuilder):
    def __init__(self, location: SourceLocation, typ: pytypes.PyType):
        super().__init__(location=location)
        self.typ = typ

    @classmethod
    def abi_expr_from_log(
        cls, typ: pytypes.PyType, value: InstanceBuilder, location: SourceLocation
    ) -> Expression:
        tmp_value = value.single_eval().resolve()
        arc4_value = intrinsic_factory.extract(
            tmp_value, start=4, loc=location, result_type=typ.checked_wtype(location)
        )
        arc4_prefix = intrinsic_factory.extract(tmp_value, start=0, length=4, loc=location)
        arc4_prefix_is_valid = compare_expr_bytes(
            lhs=arc4_prefix,
            rhs=BytesConstant(
                value=b"\x15\x1f\x7c\x75",
                source_location=location,
                encoding=BytesEncoding.base16,
            ),
            op=BuilderComparisonOp.eq,
            source_location=location,
        )
        return CheckedMaybe.from_tuple_items(
            expr=arc4_value,
            check=arc4_prefix_is_valid.resolve(),
            source_location=location,
            comment="ARC4 prefix is valid",
        )

    @typing.override
    def call(
        self,
        args: Sequence[NodeBuilder],
        arg_kinds: list[mypy.nodes.ArgKind],
        arg_names: list[str | None],
        location: SourceLocation,
    ) -> InstanceBuilder:
        arg = expect.exactly_one_arg_of_type_else_dummy(args, pytypes.BytesType, location)
        result_expr = self.abi_expr_from_log(self.typ, arg, location)
        return builder_for_instance(self.typ, result_expr)


class CopyBuilder(FunctionBuilder):
    def __init__(self, expr: Expression, location: SourceLocation, typ: pytypes.PyType):
        super().__init__(location)
        self._typ = typ
        self.expr = expr

    @typing.override
    def call(
        self,
        args: Sequence[NodeBuilder],
        arg_kinds: list[mypy.nodes.ArgKind],
        arg_names: list[str | None],
        location: SourceLocation,
    ) -> InstanceBuilder:
        expect.no_args(args, location)
        expr_result = Copy(value=self.expr, source_location=location)
        return builder_for_instance(self._typ, expr_result)


def arc4_bool_bytes(
    builder: InstanceBuilder, false_bytes: bytes, location: SourceLocation, *, negate: bool
) -> InstanceBuilder:
    lhs = builder.resolve()
    false_value = BytesConstant(
        value=false_bytes,
        encoding=BytesEncoding.base16,
        wtype=lhs.wtype,
        source_location=location,
    )
    return compare_expr_bytes(
        op=BuilderComparisonOp.eq if negate else BuilderComparisonOp.ne,
        lhs=lhs,
        rhs=false_value,
        source_location=location,
    )


class _ARC4ArrayExpressionBuilder(BytesBackedInstanceExpressionBuilder[pytypes.ArrayType], ABC):
    @typing.override
    def iterate(self) -> Expression:
        if not self.pytype.items_wtype.immutable:
            # this case is an error raised during AWST validation;
            # adding a front end specific message here to complement the error message
            # raised across all front ends
            logger.info(
                "use `algopy.urange(<array>.length)` to iterate by index",
                location=self.source_location,
            )
        return self.resolve()

    @typing.override
    def iterable_item_type(self) -> pytypes.PyType:
        return self.pytype.items

    @typing.override
    def index(self, index: InstanceBuilder, location: SourceLocation) -> InstanceBuilder:
        array_length = self.length(index.source_location)
        index = resolve_negative_literal_index(index, array_length, location)
        result_expr = IndexExpression(
            base=self.resolve(),
            index=index.resolve(),
            wtype=self.pytype.items_wtype,
            source_location=location,
        )
        return builder_for_instance(self.pytype.items, result_expr)

    @abc.abstractmethod
    def length(self, location: SourceLocation) -> InstanceBuilder: ...

    @typing.override
    def member_access(self, name: str, location: SourceLocation) -> NodeBuilder:
        match name:
            case "length":
                return self.length(location)
            case "copy":
                return CopyBuilder(self.resolve(), location, self.pytype)
            case _:
                return super().member_access(name, location)

    @typing.override
    def compare(
        self, other: InstanceBuilder, op: BuilderComparisonOp, location: SourceLocation
    ) -> InstanceBuilder:
        return compare_bytes(self=self, op=op, other=other, source_location=location)

    @typing.override
    @typing.final
    def contains(self, item: InstanceBuilder, location: SourceLocation) -> InstanceBuilder:
        logger.error(
            "item containment with ARC4 arrays is currently unsupported", location=location
        )
        return dummy_value(pytypes.BoolType, location)

    @typing.override
    @typing.final
    def slice_index(
        self,
        begin_index: InstanceBuilder | None,
        end_index: InstanceBuilder | None,
        stride: InstanceBuilder | None,
        location: SourceLocation,
    ) -> InstanceBuilder:
        raise CodeError("slicing ARC4 arrays is currently unsupported", location)
algorandfoundation/puya
src/puyapy/awst_build/eb/arc4/_base.py
Python
NOASSERTION
6,780
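A standalone sketch of the check `ARC4FromLogBuilder.abi_expr_from_log` above builds: an ABI return value is logged with the 4-byte prefix 0x151f7c75 (taken from the BytesConstant in the source), which must be verified and stripped before the remainder is decoded. The helper name below is illustrative.
# Standalone sketch: validate and strip the ARC-4 return-value log prefix.
ARC4_RETURN_PREFIX = b"\x15\x1f\x7c\x75"


def abi_value_from_log(log_item: bytes) -> bytes:
    if log_item[:4] != ARC4_RETURN_PREFIX:
        raise ValueError("ARC4 prefix is not valid")
    return log_item[4:]


assert abi_value_from_log(ARC4_RETURN_PREFIX + b"\x00\x2a") == b"\x00\x2a"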
import re
import typing
from collections.abc import Sequence

import attrs
import mypy.nodes

from puya import log
from puya.errors import CodeError, InternalError
from puya.parse import SourceLocation
from puyapy.awst_build import arc4_utils, pytypes
from puyapy.awst_build.arc4_utils import pytype_to_arc4_pytype, split_tuple_types
from puyapy.awst_build.eb import _expect as expect
from puyapy.awst_build.eb._utils import dummy_value
from puyapy.awst_build.eb.factories import builder_for_type
from puyapy.awst_build.eb.interface import (
    InstanceBuilder,
    NodeBuilder,
    StaticSizedCollectionBuilder,
)
from puyapy.awst_build.utils import maybe_resolve_literal

logger = log.get_logger(__name__)

_VALID_NAME_PATTERN = re.compile("^[_A-Za-z][A-Za-z0-9_]*$")


def _pytype_to_arc4_pytype(typ: pytypes.PyType, sig: attrs.AttrsInstance) -> pytypes.PyType:
    assert isinstance(sig, ARC4Signature)

    def on_error(bad_type: pytypes.PyType) -> typing.Never:
        raise CodeError(f"invalid return type for an ARC4 method: {bad_type}", sig.source_location)

    return arc4_utils.pytype_to_arc4_pytype(typ, on_error)


def _pytypes_to_arc4_pytypes(
    types: Sequence[pytypes.PyType], sig: attrs.AttrsInstance
) -> Sequence[pytypes.PyType]:
    return tuple(_pytype_to_arc4_pytype(t, sig) for t in types)


@attrs.frozen(kw_only=True)
class ARC4Signature:
    source_location: SourceLocation | None
    method_name: str
    arg_types: Sequence[pytypes.PyType] = attrs.field(
        converter=attrs.Converter(_pytypes_to_arc4_pytypes, takes_self=True)  # type: ignore[misc]
    )
    return_type: pytypes.PyType = attrs.field(
        converter=attrs.Converter(_pytype_to_arc4_pytype, takes_self=True)  # type: ignore[misc]
    )

    @property
    def method_selector(self) -> str:
        args = ",".join(map(arc4_utils.pytype_to_arc4, self.arg_types))
        return_type = arc4_utils.pytype_to_arc4(self.return_type)
        return f"{self.method_name}({args}){return_type}"

    def convert_args(
        self,
        native_args: Sequence[NodeBuilder],
        *,
        expect_itxn_args: bool = False,
    ) -> Sequence[InstanceBuilder]:
        num_args = len(native_args)
        num_sig_args = len(self.arg_types)
        if num_sig_args != num_args:
            logger.error(
                f"expected {num_sig_args} ABI argument{'' if num_sig_args == 1 else 's'},"
                f" got {num_args}",
                location=self.source_location,
            )
        arg_types = (
            list(map(_gtxn_to_itxn, self.arg_types)) if expect_itxn_args else self.arg_types
        )
        arc4_args = [
            _implicit_arc4_conversion(arg, pt)
            for arg, pt in zip(native_args, arg_types, strict=False)
        ]
        return arc4_args


def _gtxn_to_itxn(pytype: pytypes.PyType) -> pytypes.PyType:
    if isinstance(pytype, pytypes.GroupTransactionType):
        return pytypes.InnerTransactionFieldsetTypes[pytype.transaction_type]
    return pytype


def get_arc4_signature(
    method: NodeBuilder, native_args: Sequence[NodeBuilder], loc: SourceLocation
) -> tuple[str, ARC4Signature]:
    method_sig = expect.simple_string_literal(method, default=expect.default_raise)
    method_name, maybe_args, maybe_returns = _split_signature(method_sig, method.source_location)
    if maybe_args is None:
        arg_types = [
            _implicit_arc4_type_conversion(
                expect.instance_builder(na, default=expect.default_raise).pytype, loc
            )
            for na in native_args
        ]
    elif maybe_args:
        arg_types = [arc4_utils.arc4_to_pytype(a, loc) for a in split_tuple_types(maybe_args)]
    else:  # args are specified but empty
        arg_types = []
    return_type = (
        arc4_utils.arc4_to_pytype(maybe_returns, loc) if maybe_returns else pytypes.NoneType
    )
    return method_sig, ARC4Signature(
        method_name=method_name, arg_types=arg_types, return_type=return_type, source_location=loc
    )


def _implicit_arc4_type_conversion(typ: pytypes.PyType, loc: SourceLocation) -> pytypes.PyType:
    match typ:
        case pytypes.StrLiteralType:
            return pytypes.ARC4StringType
        case pytypes.BytesLiteralType:
            return pytypes.ARC4DynamicBytesType
        case pytypes.IntLiteralType:
            return pytypes.ARC4UIntN_Aliases[64]
        # convert an inner txn type to the equivalent group txn type
        case pytypes.InnerTransactionFieldsetType(transaction_type=txn_type):
            return pytypes.GroupTransactionTypes[txn_type]

    def on_error(invalid_pytype: pytypes.PyType) -> typing.Never:
        raise CodeError(
            f"{invalid_pytype} is not an ARC4 type and no implicit ARC4 conversion possible", loc
        )

    return pytype_to_arc4_pytype(typ, on_error)


def _inner_transaction_type_matches(instance: pytypes.PyType, target: pytypes.PyType) -> bool:
    if not isinstance(instance, pytypes.InnerTransactionFieldsetType):
        return False
    if not isinstance(target, pytypes.InnerTransactionFieldsetType):
        return False
    return (
        instance.transaction_type == target.transaction_type
        or instance.transaction_type is None
        or target.transaction_type is None
    )


def _implicit_arc4_conversion(
    operand: NodeBuilder, target_type: pytypes.PyType
) -> InstanceBuilder:
    from puya.awst.wtypes import ARC4Type

    instance = expect.instance_builder(operand, default=expect.default_dummy_value(target_type))
    instance = _maybe_resolve_arc4_literal(instance, target_type)
    if target_type <= instance.pytype:
        return instance
    target_wtype = target_type.wtype
    if isinstance(target_type, pytypes.TransactionRelatedType):
        if _inner_transaction_type_matches(instance.pytype, target_type):
            return instance
        else:
            logger.error(
                f"expected type {target_type}, got type {instance.pytype}",
                location=instance.source_location,
            )
            return dummy_value(target_type, instance.source_location)
    if not isinstance(target_wtype, ARC4Type):
        raise InternalError(
            "implicit_operand_conversion expected target_type to be an ARC-4 type,"
            f" got {target_type}",
            instance.source_location,
        )
    if isinstance(instance.pytype.wtype, ARC4Type):
        logger.error(
            f"expected type {target_type}, got type {instance.pytype}",
            location=instance.source_location,
        )
        return dummy_value(target_type, instance.source_location)
    if not target_wtype.can_encode_type(instance.pytype.checked_wtype(instance.source_location)):
        logger.error(
            f"cannot encode {instance.pytype} to {target_type}", location=instance.source_location
        )
        return dummy_value(target_type, instance.source_location)
    if (
        isinstance(target_type, pytypes.StructType)
        and isinstance(instance.pytype, pytypes.TupleType)
        and len(target_type.types) == len(instance.pytype.items)
    ):
        # Special handling to map tuples (named and unnamed) to arc4 structs
        # instance builder for TupleType should be a StaticSizedCollectionBuilder
        assert isinstance(instance, StaticSizedCollectionBuilder)
        conversion_args = [
            _implicit_arc4_conversion(item, item_target_typ)
            for item, item_target_typ in zip(
                instance.iterate_static(), target_type.types, strict=True
            )
        ]
    else:
        conversion_args = [instance]
    target_type_builder = builder_for_type(target_type, instance.source_location)
    return target_type_builder.call(
        args=conversion_args,
        arg_names=[None] * len(conversion_args),
        arg_kinds=[mypy.nodes.ARG_POS] * len(conversion_args),
        location=instance.source_location,
    )


def _maybe_resolve_arc4_literal(
    operand: InstanceBuilder, target_type: pytypes.PyType
) -> InstanceBuilder:
    """Handles special case of resolving a literal tuple into an arc4 tuple"""
    from puyapy.awst_build.eb.tuple import TupleLiteralBuilder

    if isinstance(operand, TupleLiteralBuilder) and isinstance(target_type, pytypes.ARC4TupleType):
        resolved_items = [
            _maybe_resolve_arc4_literal(item, item_type)
            for item, item_type in zip(operand.iterate_static(), target_type.items, strict=True)
        ]
        return TupleLiteralBuilder(resolved_items, operand.source_location)
    return maybe_resolve_literal(operand, target_type)


def _split_signature(
    signature: str, location: SourceLocation
) -> tuple[str, str | None, str | None]:
    """Splits signature into name, args and returns"""
    level = 0
    last_idx = 0
    name: str = ""
    args: str | None = None
    returns: str | None = None
    for idx, tok in enumerate(signature):
        if tok == "(":
            level += 1
            if level == 1:
                if not name:
                    name = signature[:idx]
                last_idx = idx + 1
        elif tok == ")":
            level -= 1
            if level == 0:
                if args is None:
                    args = signature[last_idx:idx]
                elif returns is None:
                    returns = signature[last_idx - 1 : idx + 1]
                last_idx = idx + 1
    if last_idx < len(signature):
        remaining = signature[last_idx:]
        if remaining:
            if not name:
                name = remaining
            elif args is None:
                raise CodeError(
                    f"invalid signature, args not well defined: {name=}, {remaining=}", location
                )
            elif returns:
                raise CodeError(
                    f"invalid signature, text after returns:"
                    f" {name=}, {args=}, {returns=}, {remaining=}",
                    location,
                )
            else:
                returns = remaining
    if not name or not _VALID_NAME_PATTERN.match(name):
        logger.error(f"invalid signature: {name=}", location=location)
    return name, args, returns
algorandfoundation/puya
src/puyapy/awst_build/eb/arc4/_utils.py
Python
NOASSERTION
10,173
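A standalone sketch of the balanced-parenthesis scan `_split_signature` above performs, simplified: the real function also tolerates a missing args group and reports errors with source locations. The function name below is illustrative.
# Standalone sketch: split "name(args)returns", balancing nested parentheses
# so tuple types such as "(uint64,uint64)" survive intact.
def split_signature(sig: str) -> tuple[str, str, str]:
    open_idx = sig.index("(")
    depth = 0
    for idx in range(open_idx, len(sig)):
        if sig[idx] == "(":
            depth += 1
        elif sig[idx] == ")":
            depth -= 1
            if depth == 0:
                return sig[:open_idx], sig[open_idx + 1 : idx], sig[idx + 1 :]
    raise ValueError("unbalanced signature")


assert split_signature("transfer(address,uint64)bool") == (
    "transfer", "address,uint64", "bool",
)
assert split_signature("mint(asset,(uint64,uint64))(uint64,uint64)") == (
    "mint", "asset,(uint64,uint64)", "(uint64,uint64)",
)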
import typing
from collections.abc import Iterable, Mapping, Sequence, Set

import attrs
import mypy.nodes

from puya import log
from puya.avm import OnCompletionAction, TransactionType
from puya.awst import wtypes
from puya.awst.nodes import (
    ARC4CreateOption,
    ARC4Decode,
    ARC4Encode,
    ARC4MethodConfig,
    BytesConstant,
    BytesEncoding,
    CompiledContract,
    CreateInnerTransaction,
    Expression,
    IntegerConstant,
    MethodConstant,
    SubmitInnerTransaction,
    TupleExpression,
    TupleItemExpression,
    UInt64Constant,
)
from puya.awst.txn_fields import TxnField
from puya.errors import CodeError
from puya.parse import SourceLocation, sequential_source_locations_merge
from puya.program_refs import ContractReference
from puya.utils import StableSet
from puyapy.awst_build import constants, pytypes
from puyapy.awst_build.eb import _expect as expect
from puyapy.awst_build.eb._base import FunctionBuilder
from puyapy.awst_build.eb.arc4._base import ARC4FromLogBuilder
from puyapy.awst_build.eb.arc4._utils import ARC4Signature, get_arc4_signature
from puyapy.awst_build.eb.bytes import BytesExpressionBuilder
from puyapy.awst_build.eb.compiled import (
    APP_ALLOCATION_FIELDS,
    PROGRAM_FIELDS,
    CompiledContractExpressionBuilder,
)
from puyapy.awst_build.eb.contracts import ContractTypeExpressionBuilder
from puyapy.awst_build.eb.factories import builder_for_instance
from puyapy.awst_build.eb.interface import (
    InstanceBuilder,
    LiteralBuilder,
    NodeBuilder,
    TypeBuilder,
)
from puyapy.awst_build.eb.subroutine import BaseClassSubroutineInvokerExpressionBuilder
from puyapy.awst_build.eb.transaction import InnerTransactionExpressionBuilder
from puyapy.awst_build.eb.transaction.itxn_args import PYTHON_ITXN_ARGUMENTS
from puyapy.awst_build.eb.tuple import TupleExpressionBuilder, TupleLiteralBuilder
from puyapy.awst_build.eb.uint64 import UInt64ExpressionBuilder
from puyapy.models import (
    ARC4ABIMethodData,
    ARC4BareMethodData,
    ARC4MethodData,
    ContractFragmentBase,
    ContractFragmentMethod,
)

logger = log.get_logger(__name__)

_FIELD_TO_ITXN_ARGUMENT = {arg.field: arg for arg in PYTHON_ITXN_ARGUMENTS.values()}
_ABI_CALL_TRANSACTION_FIELDS = [
    TxnField.ApplicationID,
    TxnField.OnCompletion,
    TxnField.ApprovalProgramPages,
    TxnField.ClearStateProgramPages,
    TxnField.GlobalNumUint,
    TxnField.GlobalNumByteSlice,
    TxnField.LocalNumUint,
    TxnField.LocalNumByteSlice,
    TxnField.ExtraProgramPages,
    TxnField.Fee,
    TxnField.Sender,
    TxnField.Note,
    TxnField.RekeyTo,
]
_ARC4_CREATE_TRANSACTION_FIELDS = [
    TxnField.OnCompletion,
    TxnField.Fee,
    TxnField.Sender,
    TxnField.Note,
    TxnField.RekeyTo,
]
_ARC4_UPDATE_TRANSACTION_FIELDS = [
    TxnField.ApplicationID,
    TxnField.Fee,
    TxnField.Sender,
    TxnField.Note,
    TxnField.RekeyTo,
]
_COMPILED_KWARG = "compiled"


class ARC4ClientTypeBuilder(TypeBuilder):
    def __init__(
        self, typ: pytypes.PyType, source_location: SourceLocation, fragment: ContractFragmentBase
    ):
        assert pytypes.ARC4ClientBaseType in typ.bases
        super().__init__(typ, source_location)
        self.fragment: typing.Final = fragment

    @typing.override
    def call(
        self,
        args: Sequence[NodeBuilder],
        arg_kinds: list[mypy.nodes.ArgKind],
        arg_names: list[str | None],
        location: SourceLocation,
    ) -> InstanceBuilder:
        raise CodeError("ARC4Client subclasses cannot be instantiated", location)

    @typing.override
    def member_access(self, name: str, location: SourceLocation) -> NodeBuilder:
        method = self.fragment.resolve_method(name)
        if method is None:
            return super().member_access(name, location)
        return ARC4ClientMethodExpressionBuilder(method, location)


class ARC4ClientMethodExpressionBuilder(FunctionBuilder):
    def __init__(
        self,
        method: ContractFragmentMethod,
        location: SourceLocation,
    ):
        super().__init__(location)
        self.method: typing.Final = method

    @typing.override
    def call(
        self,
        args: Sequence[NodeBuilder],
        arg_kinds: list[mypy.nodes.ArgKind],
        arg_names: list[str | None],
        location: SourceLocation,
    ) -> InstanceBuilder:
        raise CodeError(
            f"can't invoke client methods directly, use {constants.CLS_ARC4_ABI_CALL}", location
        )


class ABICallGenericTypeBuilder(FunctionBuilder):
    @typing.override
    def call(
        self,
        args: Sequence[NodeBuilder],
        arg_kinds: list[mypy.nodes.ArgKind],
        arg_names: list[str | None],
        location: SourceLocation,
    ) -> InstanceBuilder:
        return _abi_call(args, arg_names, location, return_type_annotation=pytypes.NoneType)


class ABICallTypeBuilder(FunctionBuilder):
    def __init__(self, typ: pytypes.PyType, location: SourceLocation):
        assert isinstance(typ, pytypes.PseudoGenericFunctionType)
        self._return_type_annotation = typ.return_type
        super().__init__(location)

    @typing.override
    def call(
        self,
        args: Sequence[NodeBuilder],
        arg_kinds: list[mypy.nodes.ArgKind],
        arg_names: list[str | None],
        location: SourceLocation,
    ) -> InstanceBuilder:
        return _abi_call(
            args, arg_names, location, return_type_annotation=self._return_type_annotation
        )


def _get_python_kwargs(fields: Sequence[TxnField]) -> Set[str]:
    return StableSet.from_iter(
        arg for arg, param in PYTHON_ITXN_ARGUMENTS.items() if param.field in fields
    )


class _ARC4CompilationFunctionBuilder(FunctionBuilder):
    allowed_fields: Sequence[TxnField]

    @property
    def allowed_kwargs(self) -> Set[str]:
        return {_COMPILED_KWARG, *_get_python_kwargs(self.allowed_fields)}

    @typing.override
    def call(
        self,
        args: Sequence[NodeBuilder],
        arg_kinds: list[mypy.nodes.ArgKind],
        arg_names: list[str | None],
        location: SourceLocation,
    ) -> InstanceBuilder:
        # if on_completion not allowed, it must be an update
        is_update = TxnField.OnCompletion not in self.allowed_fields
        method_or_type, abi_args, kwargs = _get_method_abi_args_and_kwargs(
            args, arg_names, self.allowed_kwargs
        )
        compiled = None
        if compiled_node := kwargs.pop(_COMPILED_KWARG, None):
            compiled = expect.argument_of_type_else_dummy(
                compiled_node, pytypes.CompiledContractType
            )
        match method_or_type:
            case None:
                raise CodeError("missing required positional argument 'method'", location)
            case BaseClassSubroutineInvokerExpressionBuilder(method=fmethod, cref=contract_ref):
                method_call = _get_arc4_method_call(fmethod, abi_args, location)
            case ContractTypeExpressionBuilder(
                fragment=fragment, pytype=pytypes.TypeType(typ=typ)
            ) if pytypes.ARC4ContractBaseType < typ:
                contract_ref = fragment.id
                method_call = _get_lifecycle_method_call(
                    fragment,
                    abi_args,
                    kind="update" if is_update else "create",
                    location=method_or_type.source_location,
                )
            case other:
                expect.not_this_type(other, default=expect.default_raise)
        if compiled is None:
            compiled = CompiledContractExpressionBuilder(
                CompiledContract(
                    contract=contract_ref,
                    wtype=pytypes.CompiledContractType.wtype,
                    source_location=location,
                )
            )
        else:
            _warn_if_different_contract(
                compiled, contract_ref, related_location=method_or_type.source_location
            )
        field_nodes = {PYTHON_ITXN_ARGUMENTS[kwarg].field: node for kwarg, node in kwargs.items()}
        if is_update:
            _add_on_completion(field_nodes, OnCompletionAction.UpdateApplication, location)
        # if on_completion is not set but can be inferred from config then use that
        elif (
            TxnField.OnCompletion not in field_nodes
            and (on_completion := _get_singular_on_complete(method_call.config)) is not None
        ):
            _add_on_completion(field_nodes, on_completion, location)
        compiled = compiled.single_eval()
        for member_name, field in PROGRAM_FIELDS.items():
            field_nodes[field] = compiled.member_access(member_name, location)
        # is creating
        if not is_update:
            # add all app allocation fields
            for member_name, field in APP_ALLOCATION_FIELDS.items():
                field_nodes[field] = compiled.member_access(member_name, location)
        _validate_transaction_kwargs(
            field_nodes,
            method_call.config,
            method_location=method_or_type.source_location,
            call_location=location,
        )
        return _create_abi_call_expr(
            abi_args=method_call.arc4_args,
            arc4_return_type=method_call.arc4_return_type,
            declared_result_type=method_call.method_return_type,
            field_nodes=field_nodes,
            location=location,
        )


def _warn_if_different_contract(
    compiled: InstanceBuilder, contract: ContractReference, *, related_location: SourceLocation
) -> None:
    # naive check for mismatch between method and compiled parameters
    expr = compiled.resolve()
    if isinstance(expr, CompiledContract) and expr.contract != contract:
        logger.warning(
            "compiled parameter is for a different contract",
            location=compiled.source_location,
        )
        logger.info("other contract reference", location=related_location)


class ARC4CreateFunctionBuilder(_ARC4CompilationFunctionBuilder):
    allowed_fields: Sequence[TxnField] = _ARC4_CREATE_TRANSACTION_FIELDS


class ARC4UpdateFunctionBuilder(_ARC4CompilationFunctionBuilder):
    allowed_fields: Sequence[TxnField] = _ARC4_UPDATE_TRANSACTION_FIELDS


def _abi_call(
    args: Sequence[NodeBuilder],
    arg_names: list[str | None],
    location: SourceLocation,
    *,
    return_type_annotation: pytypes.PyType,
) -> InstanceBuilder:
    method, abi_args, kwargs = _get_method_abi_args_and_kwargs(
        args, arg_names, _get_python_kwargs(_ABI_CALL_TRANSACTION_FIELDS)
    )
    declared_result_type: pytypes.PyType
    arc4_config = None
    match method:
        case None:
            raise CodeError("missing required positional argument 'method'", location)
        case (
            ARC4ClientMethodExpressionBuilder(method=fmethod)
            | BaseClassSubroutineInvokerExpressionBuilder(method=fmethod)
        ):
            # in this case the arc4 signature and declared return type are inferred
            method_call = _get_arc4_method_call(fmethod, abi_args, location)
            arc4_args = method_call.arc4_args
            arc4_return_type = method_call.arc4_return_type
            arc4_config = method_call.config
            declared_result_type = method_call.method_return_type
            if return_type_annotation not in (declared_result_type, pytypes.NoneType):
                logger.error(
                    "mismatch between return type of method and generic parameter",
                    location=location,
                )
        case _:
            method_str, signature = get_arc4_signature(method, abi_args, location)
            declared_result_type = return_type_annotation
            if declared_result_type != pytypes.NoneType:
                # this will be validated against signature below, by comparing
                # the generated method_selector against the supplied method_str
                signature = attrs.evolve(signature, return_type=declared_result_type)
            if not signature.method_selector.startswith(method_str):
                logger.error(
                    f"method selector from args '{signature.method_selector}' "
                    f"does not match provided method selector: '{method_str}'",
                    location=method.source_location,
                )
            arc4_args = _method_selector_and_arc4_args(signature, abi_args, location)
            arc4_return_type = signature.return_type
    field_nodes = {PYTHON_ITXN_ARGUMENTS[kwarg].field: node for kwarg, node in kwargs.items()}
    # set on_completion if it can be inferred from config
    if (
        TxnField.OnCompletion not in field_nodes
        and (on_completion := _get_singular_on_complete(arc4_config)) is not None
    ):
        _add_on_completion(field_nodes, on_completion, location)
    _validate_transaction_kwargs(
        field_nodes,
        arc4_config,
        method_location=method.source_location,
        call_location=location,
    )
    return _create_abi_call_expr(
        arc4_return_type=arc4_return_type,
        abi_args=arc4_args,
        declared_result_type=declared_result_type,
        field_nodes=field_nodes,
        location=location,
    )


def _get_method_abi_args_and_kwargs(
    args: Sequence[NodeBuilder], arg_names: list[str | None], allowed_kwargs: Set[str]
) -> tuple[NodeBuilder | None, Sequence[NodeBuilder], dict[str, NodeBuilder]]:
    method: NodeBuilder | None = None
    abi_args = list[NodeBuilder]()
    kwargs = dict[str, NodeBuilder]()
    for idx, (arg_name, arg) in enumerate(zip(arg_names, args, strict=True)):
        if arg_name is None:
            if idx == 0:
                method = arg
            else:
                abi_args.append(arg)
        elif arg_name in allowed_kwargs:
            kwargs[arg_name] = arg
        else:
            logger.error("unrecognised keyword argument", location=arg.source_location)
    return method, abi_args, kwargs


@attrs.frozen
class _ARC4MethodCall:
    config: ARC4MethodConfig | None
    arc4_args: Sequence[InstanceBuilder]
    method_return_type: pytypes.PyType
    """
    Return type as declared on the method, this may not be an ARC4 type
    due to automatic type conversion
    """
    arc4_return_type: pytypes.PyType
    """
    ARC4 return type
    """


def _get_arc4_method_call(
    data: ContractFragmentMethod, abi_args: Sequence[NodeBuilder], location: SourceLocation
) -> _ARC4MethodCall:
    if data.metadata is None:
        raise CodeError("not a valid ARC4 method", location)
    return _map_arc4_method_data_to_call(data.metadata, abi_args, location)


def _map_arc4_method_data_to_call(
    data: ARC4MethodData,
    abi_args: Sequence[NodeBuilder],
    location: SourceLocation,
) -> _ARC4MethodCall:
    match data:
        case ARC4ABIMethodData() as abi_method_data:
            signature = ARC4Signature(
                method_name=abi_method_data.config.name,
                arg_types=abi_method_data.arc4_argument_types,
                return_type=abi_method_data.arc4_return_type,
                source_location=location,
            )
            return _ARC4MethodCall(
                config=abi_method_data.config,
                arc4_args=_method_selector_and_arc4_args(signature, abi_args, location),
                method_return_type=abi_method_data.return_type,
                arc4_return_type=abi_method_data.arc4_return_type,
            )
        case ARC4BareMethodData() as bare_method_data:
            _expect_bare_method_args(abi_args)
            return _ARC4MethodCall(
                config=bare_method_data.config,
                arc4_args=[],
                method_return_type=pytypes.NoneType,
                arc4_return_type=pytypes.NoneType,
            )
        case other:
            typing.assert_never(other)


def _get_lifecycle_method_call(
    fragment: ContractFragmentBase,
    abi_args: Sequence[NodeBuilder],
    kind: typing.Literal["create", "update"],
    location: SourceLocation,
) -> _ARC4MethodCall:
    if kind == "create":
        possible_methods = list(fragment.find_arc4_method_metadata(can_create=True))
    elif kind == "update":
        possible_methods = list(
            fragment.find_arc4_method_metadata(oca=OnCompletionAction.UpdateApplication)
        )
    try:
        single_method, *others = possible_methods
    except ValueError:
        raise CodeError(f"could not find {kind} method on {fragment.id}", location) from None
    if others:
        raise CodeError(
            f"found multiple {kind} methods on {fragment.id}, please specify which one to use",
            location,
        )
    method_call = _map_arc4_method_data_to_call(single_method, abi_args, location)
    # remove method_return_type from result
    # so _create_abi_call_expr does not attempt to include any decoded ARC4 result
    # as per the stubs overload for arc4_create/arc4_update with a Contract type
    return attrs.evolve(method_call, method_return_type=pytypes.NoneType)


def _method_selector_and_arc4_args(
    signature: ARC4Signature, abi_args: Sequence[NodeBuilder], location: SourceLocation
) ->
Sequence[InstanceBuilder]:
    return [
        BytesExpressionBuilder(
            MethodConstant(value=signature.method_selector, source_location=location)
        ),
        *signature.convert_args(abi_args, expect_itxn_args=True),
    ]


def _create_abi_call_expr(
    *,
    abi_args: Sequence[InstanceBuilder],
    arc4_return_type: pytypes.PyType,
    declared_result_type: pytypes.PyType,
    field_nodes: dict[TxnField, NodeBuilder],
    location: SourceLocation,
) -> InstanceBuilder:
    group = []
    array_fields: dict[TxnField, list[Expression]] = {
        TxnField.ApplicationArgs: [],
        TxnField.Accounts: [],
        TxnField.Applications: [],
        TxnField.Assets: [],
    }

    def ref_to_arg(ref_field: TxnField, arg: InstanceBuilder) -> Expression:
        # TODO: what about references that are used more than once?
        implicit_offset = 1 if ref_field in (TxnField.Accounts, TxnField.Applications) else 0
        ref_list = array_fields[ref_field]
        ref_index = len(ref_list)
        ref_list.append(arg.resolve())
        return BytesConstant(
            value=(ref_index + implicit_offset).to_bytes(length=1),
            encoding=BytesEncoding.base16,
            source_location=arg.source_location,
        )

    for arg_b in abi_args:
        arg_expr = None
        match arg_b.pytype:
            case pytypes.InnerTransactionFieldsetType():
                group.append(arg_b.resolve())
                # no arg_expr as txn aren't part of the app args
            case pytypes.TransactionRelatedType():
                logger.error(
                    "only inner transaction types can be used to call another contract",
                    location=arg_b.source_location,
                )
            case pytypes.AssetType:
                arg_expr = ref_to_arg(TxnField.Assets, arg_b)
            case pytypes.AccountType:
                arg_expr = ref_to_arg(TxnField.Accounts, arg_b)
            case pytypes.ApplicationType:
                arg_expr = ref_to_arg(TxnField.Applications, arg_b)
            case _:
                arg_expr = arg_b.resolve()
        if arg_expr is not None:
            array_fields[TxnField.ApplicationArgs].append(arg_expr)

    txn_type_appl = TransactionType.appl
    fields: dict[TxnField, Expression] = {
        TxnField.Fee: UInt64Constant(value=0, source_location=location),
        TxnField.TypeEnum: UInt64Constant(
            value=txn_type_appl.value, teal_alias=txn_type_appl.name, source_location=location
        ),
    }
    for arr_field, arr_field_values in array_fields.items():
        if arr_field_values:
            # the AVM allows at most 16 app args: pack any overflow into a tuple in the final slot
            if arr_field == TxnField.ApplicationArgs and len(arr_field_values) > 16:
                args_to_pack = arr_field_values[15:]
                arr_field_values[15:] = [
                    _arc4_tuple_from_items(args_to_pack, _combine_locs(args_to_pack))
                ]
            fields[arr_field] = TupleExpression.from_items(
                arr_field_values, _combine_locs(arr_field_values)
            )
    for field, field_node in field_nodes.items():
        params = _FIELD_TO_ITXN_ARGUMENT.get(field)
        if params is None:
            logger.error("unrecognised keyword argument", location=field_node.source_location)
        else:
            fields[field] = params.validate_and_convert(field_node).resolve()

    itxn_result_pytype = pytypes.InnerTransactionResultTypes[txn_type_appl]
    create_itxn = CreateInnerTransaction(
        fields=fields,
        wtype=pytypes.InnerTransactionFieldsetTypes[txn_type_appl].wtype,
        source_location=location,
    )
    group.append(create_itxn)
    if len(group) == 1:
        itxn_builder: InstanceBuilder = InnerTransactionExpressionBuilder(
            SubmitInnerTransaction(itxns=group, source_location=location), itxn_result_pytype
        )
    else:
        itxn_types = []
        for itxn in group:
            assert isinstance(itxn.wtype, wtypes.WInnerTransactionFields)
            itxn_types.append(pytypes.InnerTransactionResultTypes[itxn.wtype.transaction_type])
        itxn_tuple_result_pytype = pytypes.GenericTupleType.parameterise(
            itxn_types,
            location,
        )
        itxn_tuple_builder = TupleExpressionBuilder(
            SubmitInnerTransaction(itxns=group, source_location=location),
            itxn_tuple_result_pytype,
        ).single_eval()
        itxn_builder = InnerTransactionExpressionBuilder(
            TupleItemExpression(
base=itxn_tuple_builder.resolve(), index=-1, source_location=location, ), itxn_result_pytype, ) if declared_result_type == pytypes.NoneType: return itxn_builder itxn_builder = itxn_builder.single_eval() assert isinstance(itxn_builder, InnerTransactionExpressionBuilder) last_log = itxn_builder.get_field_value(TxnField.LastLog, pytypes.BytesType, location) abi_result = ARC4FromLogBuilder.abi_expr_from_log(arc4_return_type, last_log, location) # the declared result type may be different to the arc4 signature return type # due to automatic conversion of ARC4 -> native types if declared_result_type != arc4_return_type: abi_result = ARC4Decode( value=abi_result, wtype=declared_result_type.checked_wtype(location), source_location=location, ) abi_result_builder = builder_for_instance(declared_result_type, abi_result) return TupleLiteralBuilder((abi_result_builder, itxn_builder), location) def _combine_locs(exprs: Sequence[Expression | NodeBuilder]) -> SourceLocation: return sequential_source_locations_merge(a.source_location for a in exprs) def _arc4_tuple_from_items( items: Sequence[Expression], source_location: SourceLocation ) -> ARC4Encode: # TODO: should we just allow TupleExpression to have an ARCTuple wtype? args_tuple = TupleExpression.from_items(items, source_location) return ARC4Encode( value=args_tuple, wtype=wtypes.ARC4Tuple(types=args_tuple.wtype.types, source_location=source_location), source_location=source_location, ) def _expect_bare_method_args(abi_args: Sequence[NodeBuilder]) -> None: if abi_args: logger.error("unexpected args for bare method", location=_combine_locs(abi_args)) def _validate_transaction_kwargs( field_nodes: Mapping[TxnField, NodeBuilder], arc4_config: ARC4MethodConfig | None, *, method_location: SourceLocation, call_location: SourceLocation, ) -> None: # note these values may be None which indicates their value is unknown at compile time on_completion = _get_on_completion(field_nodes) is_update = on_completion == OnCompletionAction.UpdateApplication is_create = _is_creating(field_nodes) if is_create: # app_id not provided but method doesn't support creating if arc4_config and arc4_config.create == ARC4CreateOption.disallow: logger.error("method cannot be used to create application", location=method_location) # required args for creation missing else: _check_program_fields_are_present( "missing required arguments to create app", field_nodes, call_location ) if is_update: if arc4_config and ( OnCompletionAction.UpdateApplication not in arc4_config.allowed_completion_types ): logger.error("method cannot be used to update application", location=method_location) else: _check_program_fields_are_present( "missing required arguments to update app", field_nodes, call_location ) # on_completion not valid for arc4_config elif ( on_completion is not None and arc4_config and on_completion not in arc4_config.allowed_completion_types ): arg = field_nodes[TxnField.OnCompletion] logger.error( "on completion action is not supported by ARC4 method being called", location=arg.source_location, ) logger.info("method ARC4 configuration", location=arc4_config.source_location) # programs provided when known not to be creating or updating if is_create is False and is_update is False: _check_fields_not_present( "provided argument is only valid when creating or updating an application", PROGRAM_FIELDS.values(), field_nodes, ) if is_create is False: _check_fields_not_present( "provided argument is only valid when creating an application", APP_ALLOCATION_FIELDS.values(), field_nodes, ) def 
_check_program_fields_are_present( error_message: str, field_nodes: Mapping[TxnField, NodeBuilder], location: SourceLocation ) -> None: if missing_fields := [field for field in PROGRAM_FIELDS.values() if field not in field_nodes]: logger.error( f"{error_message}: {', '.join(_get_python_kwargs(missing_fields))}", location=location, ) def _check_fields_not_present( error_message: str, fields_to_omit: Iterable[TxnField], field_nodes: Mapping[TxnField, NodeBuilder], ) -> None: for field in fields_to_omit: if node := field_nodes.get(field): logger.error(error_message, location=node.source_location) def _get_singular_on_complete(config: ARC4MethodConfig | None) -> OnCompletionAction | None: if config: try: (on_complete,) = config.allowed_completion_types except ValueError: pass else: return on_complete return None def _get_on_completion(field_nodes: Mapping[TxnField, NodeBuilder]) -> OnCompletionAction | None: """ Returns OnCompletionAction if it is statically known, otherwise returns None """ match field_nodes.get(TxnField.OnCompletion): case None: return OnCompletionAction.NoOp case InstanceBuilder(pytype=pytypes.OnCompleteActionType) as eb: value = eb.resolve() if isinstance(value, IntegerConstant): return OnCompletionAction(value.value) return None def _is_creating(field_nodes: Mapping[TxnField, NodeBuilder]) -> bool | None: """ Returns app_id == 0 if app_id is statically known, otherwise returns None """ match field_nodes.get(TxnField.ApplicationID): case None: return True case LiteralBuilder(value=int(app_id)): return app_id == 0 return None def _add_on_completion( field_nodes: dict[TxnField, NodeBuilder], on_complete: OnCompletionAction, location: SourceLocation, ) -> None: # if NoOp can just omit if on_complete == OnCompletionAction.NoOp: return field_nodes[TxnField.OnCompletion] = UInt64ExpressionBuilder( UInt64Constant( source_location=location, value=on_complete.value, teal_alias=on_complete.name ), enum_type=pytypes.OnCompleteActionType, )
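# ---------------------------------------------------------------------------
# Editor's note: a minimal, self-contained sketch (not part of this module)
# of the ApplicationArgs packing rule implemented in _create_abi_call_expr
# above. The AVM accepts at most 16 application arguments, so when an ABI
# call produces more (method selector included), everything from the 16th
# slot onwards is packed into a single trailing ARC4 tuple. The helper below
# is hypothetical and naively concatenates the overflow purely to show the
# slicing; the real code builds an ARC4 tuple expression instead.


def _pack_app_args_sketch(encoded_args: list[bytes]) -> list[bytes]:
    if len(encoded_args) <= 16:
        return encoded_args
    head, overflow = encoded_args[:15], encoded_args[15:]
    return [*head, b"".join(overflow)]  # real code: _arc4_tuple_from_items(...)


assert len(_pack_app_args_sketch([b"x"] * 20)) == 16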
algorandfoundation/puya
src/puyapy/awst_build/eb/arc4/abi_call.py
Python
NOASSERTION
28,228
import typing from collections.abc import Sequence import mypy.nodes from puya import log, utils from puya.algo_constants import ENCODED_ADDRESS_LENGTH from puya.awst import wtypes from puya.awst.nodes import ( AddressConstant, CheckedMaybe, Expression, NumericComparison, NumericComparisonExpression, ReinterpretCast, UInt64Constant, ) from puya.parse import SourceLocation from puyapy.awst_build import intrinsic_factory, pytypes from puyapy.awst_build.eb import _expect as expect from puyapy.awst_build.eb._bytes_backed import BytesBackedTypeBuilder from puyapy.awst_build.eb._utils import compare_expr_bytes from puyapy.awst_build.eb.arc4.static_array import StaticArrayExpressionBuilder from puyapy.awst_build.eb.interface import ( BuilderComparisonOp, InstanceBuilder, LiteralBuilder, NodeBuilder, ) from puyapy.awst_build.eb.reference_types.account import AccountExpressionBuilder logger = log.get_logger(__name__) class AddressTypeBuilder(BytesBackedTypeBuilder[pytypes.ArrayType]): def __init__(self, location: SourceLocation): super().__init__(pytypes.ARC4AddressType, location) @typing.override def try_convert_literal( self, literal: LiteralBuilder, location: SourceLocation ) -> InstanceBuilder | None: match literal.value: case str(str_value): if not utils.valid_address(str_value): logger.error( f"Invalid address value. Address literals should be" f" {ENCODED_ADDRESS_LENGTH} characters and not include base32 padding", location=literal.source_location, ) expr = AddressConstant( value=str_value, wtype=wtypes.arc4_address_alias, source_location=location, ) return AddressExpressionBuilder(expr) return None @typing.override def call( self, args: Sequence[NodeBuilder], arg_kinds: list[mypy.nodes.ArgKind], arg_names: list[str | None], location: SourceLocation, ) -> InstanceBuilder: arg = expect.at_most_one_arg(args, location) if arg is None: result = _zero_address(location) elif arg.pytype == pytypes.StrLiteralType: return arg.resolve_literal(converter=AddressTypeBuilder(location)) elif arg.pytype == pytypes.AccountType: # Account is a final type result = _address_from_native(arg) else: arg = expect.argument_of_type_else_dummy(arg, pytypes.BytesType) arg = arg.single_eval() is_correct_length = NumericComparisonExpression( operator=NumericComparison.eq, source_location=location, lhs=UInt64Constant(value=32, source_location=location), rhs=intrinsic_factory.bytes_len(arg.resolve(), location), ) result = CheckedMaybe.from_tuple_items( expr=_address_from_native(arg), check=is_correct_length, source_location=location, comment="Address length is 32 bytes", ) return AddressExpressionBuilder(result) class AddressExpressionBuilder(StaticArrayExpressionBuilder): def __init__(self, expr: Expression): super().__init__(expr, pytypes.ARC4AddressType) @typing.override def bool_eval(self, location: SourceLocation, *, negate: bool = False) -> InstanceBuilder: return compare_expr_bytes( lhs=self.resolve(), op=BuilderComparisonOp.eq if negate else BuilderComparisonOp.ne, rhs=_zero_address(location), source_location=location, ) @typing.override def compare( self, other: InstanceBuilder, op: BuilderComparisonOp, location: SourceLocation ) -> InstanceBuilder: if other.pytype == pytypes.StrLiteralType: rhs = other.resolve_literal(AddressTypeBuilder(other.source_location)).resolve() elif other.pytype == pytypes.AccountType: # Account is a final type rhs = _address_from_native(other) elif pytypes.ARC4AddressType <= other.pytype: rhs = other.resolve() else: return NotImplemented return compare_expr_bytes(lhs=self.resolve(), 
op=op, rhs=rhs, source_location=location) @typing.override def member_access(self, name: str, location: SourceLocation) -> NodeBuilder: match name: case "native": return AccountExpressionBuilder(_address_to_native(self)) case _: return super().member_access(name, location) def _zero_address(location: SourceLocation) -> Expression: return intrinsic_factory.zero_address(location, as_type=wtypes.arc4_address_alias) def _address_to_native(builder: InstanceBuilder) -> Expression: assert pytypes.ARC4AddressType <= builder.pytype return ReinterpretCast( expr=builder.resolve(), wtype=wtypes.account_wtype, source_location=builder.source_location, ) def _address_from_native(builder: InstanceBuilder) -> Expression: assert builder.pytype.is_type_or_subtype(pytypes.AccountType, pytypes.BytesType) return ReinterpretCast( expr=builder.resolve(), wtype=wtypes.arc4_address_alias, source_location=builder.source_location, )
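# Editor's note: a standalone sketch (hypothetical helper, not part of puya's
# API) of the runtime check AddressTypeBuilder.call emits above for arbitrary
# Bytes arguments: an ARC4 Address is exactly 32 bytes, the decoded form of a
# 58-character base32 address string.


def _check_address_bytes_sketch(raw: bytes) -> bytes:
    if len(raw) != 32:
        raise ValueError("Address length is 32 bytes")
    return raw


_check_address_bytes_sketch(b"\x00" * 32)  # ok: the zero address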
algorandfoundation/puya
src/puyapy/awst_build/eb/arc4/address.py
Python
NOASSERTION
5,505
import typing from collections.abc import Sequence import mypy.nodes from puya import log from puya.awst import wtypes from puya.awst.nodes import ARC4Decode, ARC4Encode, BoolConstant, Expression from puya.parse import SourceLocation from puyapy.awst_build import pytypes from puyapy.awst_build.eb import _expect as expect from puyapy.awst_build.eb._base import NotIterableInstanceExpressionBuilder from puyapy.awst_build.eb._bytes_backed import BytesBackedInstanceExpressionBuilder from puyapy.awst_build.eb._utils import compare_bytes from puyapy.awst_build.eb.arc4._base import ARC4TypeBuilder, arc4_bool_bytes from puyapy.awst_build.eb.bool import BoolExpressionBuilder from puyapy.awst_build.eb.interface import ( BuilderComparisonOp, InstanceBuilder, LiteralBuilder, NodeBuilder, ) logger = log.get_logger(__name__) class ARC4BoolTypeBuilder(ARC4TypeBuilder): def __init__(self, location: SourceLocation): super().__init__(pytypes.ARC4BoolType, location) @typing.override def try_convert_literal( self, literal: LiteralBuilder, location: SourceLocation ) -> InstanceBuilder | None: match literal.value: case bool(bool_literal): return ARC4BoolExpressionBuilder( BoolConstant( value=bool_literal, source_location=location, wtype=wtypes.arc4_bool_wtype ), ) return None @typing.override def call( self, args: Sequence[NodeBuilder], arg_kinds: list[mypy.nodes.ArgKind], arg_names: list[str | None], location: SourceLocation, ) -> InstanceBuilder: arg = expect.at_most_one_arg(args, location) match arg: case None: expr: Expression = BoolConstant( value=False, source_location=location, wtype=wtypes.arc4_bool_wtype ) case _: arg = expect.argument_of_type_else_dummy(arg, pytypes.BoolType) native_bool = arg.resolve() expr = ARC4Encode( value=native_bool, wtype=wtypes.arc4_bool_wtype, source_location=location ) return ARC4BoolExpressionBuilder(expr) class ARC4BoolExpressionBuilder( NotIterableInstanceExpressionBuilder, BytesBackedInstanceExpressionBuilder ): def __init__(self, expr: Expression): super().__init__(pytypes.ARC4BoolType, expr) @typing.override def bool_eval(self, location: SourceLocation, *, negate: bool = False) -> InstanceBuilder: return arc4_bool_bytes( self, false_bytes=b"\x00", negate=negate, location=location, ) @typing.override def member_access(self, name: str, location: SourceLocation) -> NodeBuilder: match name: case "native": return self._native(location) case _: return super().member_access(name, location) def _native(self, location: SourceLocation) -> BoolExpressionBuilder: result_expr: Expression = ARC4Decode( value=self.resolve(), wtype=pytypes.BoolType.wtype, source_location=location, ) return BoolExpressionBuilder(result_expr) @typing.override def compare( self, other: InstanceBuilder, op: BuilderComparisonOp, location: SourceLocation ) -> InstanceBuilder: if other.pytype == pytypes.BoolType: lhs = self._native(self.source_location) return lhs.compare(other, op, location) return compare_bytes(self=self, op=op, other=other, source_location=location)
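# Editor's note: a hedged sketch of the byte-level encoding that the
# ARC4Encode/ARC4Decode nodes above stand for. Per the ARC-4 spec, a bool is
# one byte with the value in the most significant bit (0x80 for True), which
# is also why arc4_bool_bytes is called with false_bytes=b"\x00".


def _encode_arc4_bool_sketch(value: bool) -> bytes:
    return b"\x80" if value else b"\x00"


def _decode_arc4_bool_sketch(data: bytes) -> bool:
    return data != b"\x00"


assert _decode_arc4_bool_sketch(_encode_arc4_bool_sketch(True)) is True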
algorandfoundation/puya
src/puyapy/awst_build/eb/arc4/bool.py
Python
NOASSERTION
3,710
import abc import typing from collections.abc import Sequence import mypy.nodes from puya import log from puya.awst import wtypes from puya.awst.nodes import ( ArrayConcat, ArrayExtend, ArrayPop, Expression, ExpressionStatement, IntrinsicCall, NewArray, Statement, TupleExpression, UInt64Constant, ) from puya.errors import CodeError from puya.parse import SourceLocation from puyapy.awst_build import pytypes from puyapy.awst_build.eb import _expect as expect from puyapy.awst_build.eb._base import FunctionBuilder, GenericTypeBuilder from puyapy.awst_build.eb._bytes_backed import BytesBackedTypeBuilder from puyapy.awst_build.eb._utils import ( dummy_statement, dummy_value, ) from puyapy.awst_build.eb.arc4._base import _ARC4ArrayExpressionBuilder, arc4_bool_bytes from puyapy.awst_build.eb.factories import builder_for_instance from puyapy.awst_build.eb.interface import BuilderBinaryOp, InstanceBuilder, NodeBuilder from puyapy.awst_build.eb.none import NoneExpressionBuilder from puyapy.awst_build.eb.uint64 import UInt64ExpressionBuilder __all__ = [ "DynamicArrayGenericTypeBuilder", "DynamicArrayTypeBuilder", "DynamicArrayExpressionBuilder", ] logger = log.get_logger(__name__) class DynamicArrayGenericTypeBuilder(GenericTypeBuilder): @typing.override def call( self, args: Sequence[NodeBuilder], arg_kinds: list[mypy.nodes.ArgKind], arg_names: list[str | None], location: SourceLocation, ) -> InstanceBuilder: if not args: raise CodeError("empty arrays require a type annotation to be instantiated", location) element_type = expect.instance_builder(args[0], default=expect.default_raise).pytype typ = pytypes.GenericARC4DynamicArrayType.parameterise([element_type], location) values = tuple(expect.argument_of_type_else_dummy(a, element_type).resolve() for a in args) wtype = typ.wtype assert isinstance(wtype, wtypes.ARC4DynamicArray) return DynamicArrayExpressionBuilder( NewArray(values=values, wtype=wtype, source_location=location), typ ) class DynamicArrayTypeBuilder(BytesBackedTypeBuilder[pytypes.ArrayType]): def __init__(self, typ: pytypes.PyType, location: SourceLocation): assert isinstance(typ, pytypes.ArrayType) assert typ.generic == pytypes.GenericARC4DynamicArrayType assert typ.size is None super().__init__(typ, location) @typing.override def call( self, args: Sequence[NodeBuilder], arg_kinds: list[mypy.nodes.ArgKind], arg_names: list[str | None], location: SourceLocation, ) -> InstanceBuilder: typ = self.produces() values = tuple(expect.argument_of_type_else_dummy(a, typ.items).resolve() for a in args) wtype = typ.wtype assert isinstance(wtype, wtypes.ARC4DynamicArray) return DynamicArrayExpressionBuilder( NewArray(values=values, wtype=wtype, source_location=location), self._pytype ) class DynamicArrayExpressionBuilder(_ARC4ArrayExpressionBuilder): def __init__(self, expr: Expression, typ: pytypes.PyType): assert isinstance(typ, pytypes.ArrayType) super().__init__(typ, expr) @typing.override def length(self, location: SourceLocation) -> InstanceBuilder: length = IntrinsicCall( op_code="extract_uint16", stack_args=[self.resolve(), UInt64Constant(value=0, source_location=location)], wtype=wtypes.uint64_wtype, source_location=location, ) return UInt64ExpressionBuilder(length) @typing.override def member_access(self, name: str, location: SourceLocation) -> NodeBuilder: match name: case "append": return _Append(self.resolve(), self.pytype, location) case "extend": return _Extend(self.resolve(), self.pytype, location) case "pop": return _Pop(self.resolve(), self.pytype, location) case _: return 
super().member_access(name, location) @typing.override def augmented_assignment( self, op: BuilderBinaryOp, rhs: InstanceBuilder, location: SourceLocation ) -> Statement: if op != BuilderBinaryOp.add: logger.error(f"unsupported operator for type: {op.value!r}", location=location) return dummy_statement(location) rhs = _match_array_concat_arg(rhs, self.pytype) extend = ArrayExtend( base=self.resolve(), other=rhs.resolve(), wtype=wtypes.void_wtype, source_location=location, ) return ExpressionStatement(expr=extend) @typing.override def binary_op( self, other: InstanceBuilder, op: BuilderBinaryOp, location: SourceLocation, *, reverse: bool, ) -> InstanceBuilder: # only __add__ is implemented, not __radd__ if op != BuilderBinaryOp.add or reverse: return NotImplemented other = _match_array_concat_arg(other, self.pytype) return DynamicArrayExpressionBuilder( ArrayConcat( left=self.resolve(), right=other.resolve(), wtype=self.pytype.wtype, source_location=location, ), self.pytype, ) @typing.override def bool_eval(self, location: SourceLocation, *, negate: bool = False) -> InstanceBuilder: return arc4_bool_bytes( self, false_bytes=b"\x00\x00", negate=negate, location=location, ) class _ArrayFunc(FunctionBuilder, abc.ABC): def __init__(self, expr: Expression, typ: pytypes.ArrayType, location: SourceLocation): super().__init__(location) self.expr = expr self.typ = typ class _Append(_ArrayFunc): @typing.override def call( self, args: Sequence[NodeBuilder], arg_kinds: list[mypy.nodes.ArgKind], arg_names: list[str | None], location: SourceLocation, ) -> InstanceBuilder: arg = expect.exactly_one_arg_of_type_else_dummy(args, self.typ.items, location) args_expr = arg.resolve() args_tuple = TupleExpression.from_items([args_expr], arg.source_location) return NoneExpressionBuilder( ArrayExtend( base=self.expr, other=args_tuple, wtype=wtypes.void_wtype, source_location=location ) ) class _Pop(_ArrayFunc): @typing.override def call( self, args: Sequence[NodeBuilder], arg_kinds: list[mypy.nodes.ArgKind], arg_names: list[str | None], location: SourceLocation, ) -> InstanceBuilder: expect.no_args(args, location) result_expr = ArrayPop( base=self.expr, wtype=self.typ.items_wtype, source_location=location ) return builder_for_instance(self.typ.items, result_expr) class _Extend(_ArrayFunc): @typing.override def call( self, args: Sequence[NodeBuilder], arg_kinds: list[mypy.nodes.ArgKind], arg_names: list[str | None], location: SourceLocation, ) -> InstanceBuilder: arg = expect.exactly_one_arg(args, location, default=expect.default_none) if arg is None: other = dummy_value(self.typ, location) else: other = _match_array_concat_arg(arg, self.typ) return NoneExpressionBuilder( ArrayExtend( base=self.expr, other=other.resolve(), wtype=wtypes.void_wtype, source_location=location, ) ) def _match_array_concat_arg(arg: InstanceBuilder, arr_type: pytypes.ArrayType) -> InstanceBuilder: expected_item_type = arr_type.items match arg.pytype: case pytypes.SequenceType(items=array_items): okay = expected_item_type <= array_items case pytypes.TupleLikeType(items=tuple_items): okay = all(expected_item_type <= ti for ti in tuple_items) case _: okay = False if okay: return arg logger.error( "expected an array or tuple of the same element type", location=arg.source_location ) return dummy_value(arr_type, arg.source_location)
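# Editor's note: a self-contained sketch of the wire format assumed by the
# builders above. An ARC4 dynamic array is a big-endian u16 element count
# followed by the encoded elements, which is why length() reads
# extract_uint16 at offset 0 and bool_eval compares against b"\x00\x00".
# Fixed-size, non-bool elements are assumed for simplicity.


def _encode_dynamic_array_sketch(elements: list[bytes]) -> bytes:
    return len(elements).to_bytes(2, "big") + b"".join(elements)


encoded = _encode_dynamic_array_sketch([b"\x01", b"\x02", b"\x03"])
assert encoded[:2] == (3).to_bytes(2, "big")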
algorandfoundation/puya
src/puyapy/awst_build/eb/arc4/dynamic_array.py
Python
NOASSERTION
8,463
import typing from collections.abc import Sequence import mypy.nodes from puya import algo_constants, log from puya.awst import wtypes from puya.awst.nodes import ( ARC4Decode, ARC4Encode, BytesConstant, BytesEncoding, Expression, NewArray, ) from puya.parse import SourceLocation from puyapy.awst_build import pytypes from puyapy.awst_build.eb import _expect as expect from puyapy.awst_build.eb._bytes_backed import BytesBackedTypeBuilder from puyapy.awst_build.eb._utils import dummy_value from puyapy.awst_build.eb.arc4.dynamic_array import DynamicArrayExpressionBuilder from puyapy.awst_build.eb.arc4.uint import UIntNTypeBuilder from puyapy.awst_build.eb.bytes import BytesExpressionBuilder from puyapy.awst_build.eb.interface import InstanceBuilder, LiteralBuilder, NodeBuilder logger = log.get_logger(__name__) class DynamicBytesTypeBuilder(BytesBackedTypeBuilder[pytypes.ArrayType]): def __init__(self, location: SourceLocation): super().__init__(pytypes.ARC4DynamicBytesType, location) @typing.override def try_convert_literal( self, literal: LiteralBuilder, location: SourceLocation ) -> InstanceBuilder | None: match literal.value: case bytes(bytes_literal): if len(bytes_literal) > (algo_constants.MAX_BYTES_LENGTH - 2): logger.error( "encoded bytes exceed max length", location=literal.source_location ) bytes_expr = BytesConstant( value=bytes_literal, encoding=BytesEncoding.unknown, source_location=location ) return self._from_bytes_expr(bytes_expr, location) return None @typing.override def call( self, args: Sequence[NodeBuilder], arg_kinds: list[mypy.nodes.ArgKind], arg_names: list[str | None], location: SourceLocation, ) -> InstanceBuilder: match args: case [InstanceBuilder(pytype=pytypes.BytesLiteralType) as lit]: return lit.resolve_literal(DynamicBytesTypeBuilder(location)) case []: bytes_expr: Expression = BytesConstant( value=b"", encoding=BytesEncoding.unknown, source_location=location ) case [ InstanceBuilder(pytype=single_arg_pytype) as eb ] if pytypes.BytesType <= single_arg_pytype: bytes_expr = eb.resolve() case _: non_literal_args = tuple(_coerce_to_byte(a).resolve() for a in args) return DynamicBytesExpressionBuilder( NewArray( values=non_literal_args, wtype=self._arc4_type, source_location=location ) ) return self._from_bytes_expr(bytes_expr, location) @property def _arc4_type(self) -> wtypes.ARC4DynamicArray: typ = self.produces() wtype = typ.wtype assert isinstance(wtype, wtypes.ARC4DynamicArray) return wtype def _from_bytes_expr(self, expr: Expression, location: SourceLocation) -> InstanceBuilder: encode_expr = ARC4Encode(value=expr, wtype=self._arc4_type, source_location=location) return DynamicBytesExpressionBuilder(encode_expr) def _coerce_to_byte(builder: NodeBuilder) -> InstanceBuilder: arg = expect.instance_builder( builder, default=expect.default_dummy_value(pytypes.ARC4ByteType) ) arg = arg.resolve_literal(UIntNTypeBuilder(pytypes.ARC4ByteType, arg.source_location)) match arg: # can't use expect.argument_of_type here, we need a match statement case InstanceBuilder(pytype=pytypes.ARC4UIntNType(bits=8)): return arg case _: logger.error("invalid argument type", location=arg.source_location) return dummy_value(pytypes.ARC4ByteType, arg.source_location) class DynamicBytesExpressionBuilder(DynamicArrayExpressionBuilder): def __init__(self, expr: Expression): super().__init__(expr, pytypes.ARC4DynamicBytesType) def member_access(self, name: str, location: SourceLocation) -> NodeBuilder: match name: case "native": return BytesExpressionBuilder( ARC4Decode( value=self.resolve(), 
wtype=wtypes.bytes_wtype, source_location=location ) ) case _: return super().member_access(name, location)
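# Editor's note: a sketch of what DynamicBytes encoding amounts to. It is an
# ARC4 dynamic array whose element is a one-byte uint, so the encoded form is
# a big-endian u16 length prefix followed by the raw bytes; this is also why
# the literal-length check above allows at most MAX_BYTES_LENGTH - 2 bytes.
# Hypothetical helpers, shown for illustration only.


def _encode_dynamic_bytes_sketch(raw: bytes) -> bytes:
    return len(raw).to_bytes(2, "big") + raw


def _decode_dynamic_bytes_sketch(encoded: bytes) -> bytes:
    length = int.from_bytes(encoded[:2], "big")
    return encoded[2 : 2 + length]


assert _decode_dynamic_bytes_sketch(_encode_dynamic_bytes_sketch(b"abc")) == b"abc"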
algorandfoundation/puya
src/puyapy/awst_build/eb/arc4/dynamic_bytes.py
Python
NOASSERTION
4,552
import typing
from collections.abc import Sequence

import mypy.nodes

from puya import log
from puya.awst.nodes import Emit
from puya.parse import SourceLocation
from puyapy.awst_build import pytypes
from puyapy.awst_build.arc4_utils import pytype_to_arc4
from puyapy.awst_build.eb import _expect as expect
from puyapy.awst_build.eb._base import FunctionBuilder
from puyapy.awst_build.eb.arc4._utils import get_arc4_signature
from puyapy.awst_build.eb.arc4.struct import ARC4StructTypeBuilder
from puyapy.awst_build.eb.interface import InstanceBuilder, NodeBuilder
from puyapy.awst_build.eb.none import NoneExpressionBuilder

logger = log.get_logger(__name__)


class EmitBuilder(FunctionBuilder):
    @typing.override
    def call(
        self,
        args: Sequence[NodeBuilder],
        arg_kinds: list[mypy.nodes.ArgKind],
        arg_names: list[str | None],
        location: SourceLocation,
    ) -> InstanceBuilder:
        first, rest = expect.at_least_one_arg(args, location, default=expect.default_raise)
        match first:
            case (
                InstanceBuilder(pytype=pytypes.StructType() as struct_type) as event_arg_eb
            ) if pytypes.ARC4StructBaseType < struct_type:
                if rest:
                    logger.error(
                        "unexpected additional arguments", location=rest[0].source_location
                    )
            case _:
                method_sig, signature = get_arc4_signature(first, rest, location)
                # events must not declare a return type, i.e. the signature must end in ")void"
                if signature.return_type != pytypes.NoneType or not method_sig.endswith(")void"):
                    logger.error(
                        "event signatures cannot include return types",
                        location=first.source_location,
                    )
                arc4_args = signature.convert_args(rest)
                # emit requires a struct type, so generate one based on args
                struct_type = pytypes.StructType(
                    base=pytypes.ARC4StructBaseType,
                    name=signature.method_name,
                    desc=None,
                    fields={
                        f"field{idx}": arg.pytype for idx, arg in enumerate(arc4_args, start=1)
                    },
                    frozen=True,
                    source_location=location,
                )
                event_arg_eb = ARC4StructTypeBuilder(struct_type, location).call(
                    args=arc4_args,
                    arg_names=[None] * len(arc4_args),
                    arg_kinds=[mypy.nodes.ARG_POS] * len(arc4_args),
                    location=location,
                )
        event_name = struct_type.name.split(".")[-1]
        event_sig = f"{event_name}{pytype_to_arc4(event_arg_eb.pytype, location)}"
        emit = Emit(
            signature=event_sig,
            value=event_arg_eb.resolve(),
            source_location=location,
        )
        return NoneExpressionBuilder(emit)
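# Editor's note: a hedged sketch of the ARC-28 event identifier behind the
# Emit node above: the event is logged as a 4-byte selector (the first 4
# bytes of the SHA-512/256 hash of the event signature, e.g.
# "Swapped(uint64,uint64)") followed by the ARC4-encoded payload. hashlib's
# "sha512_256" algorithm depends on the interpreter's OpenSSL build.

import hashlib


def _event_selector_sketch(event_signature: str) -> bytes:
    return hashlib.new("sha512_256", event_signature.encode("utf8")).digest()[:4]


assert len(_event_selector_sketch("Swapped(uint64,uint64)")) == 4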
algorandfoundation/puya
src/puyapy/awst_build/eb/arc4/emit.py
Python
NOASSERTION
2,946
import typing from collections.abc import Sequence import mypy.nodes from puya import log from puya.awst import wtypes from puya.awst.nodes import Expression, IndexExpression, NewArray, UInt64Constant from puya.errors import CodeError from puya.parse import SourceLocation from puyapy.awst_build import pytypes from puyapy.awst_build.eb import _expect as expect from puyapy.awst_build.eb._base import GenericTypeBuilder from puyapy.awst_build.eb._bytes_backed import BytesBackedTypeBuilder from puyapy.awst_build.eb._utils import constant_bool_and_error from puyapy.awst_build.eb.arc4._base import _ARC4ArrayExpressionBuilder from puyapy.awst_build.eb.factories import builder_for_instance from puyapy.awst_build.eb.interface import ( InstanceBuilder, NodeBuilder, StaticSizedCollectionBuilder, ) from puyapy.awst_build.eb.uint64 import UInt64ExpressionBuilder __all__ = [ "StaticArrayGenericTypeBuilder", "StaticArrayTypeBuilder", "StaticArrayExpressionBuilder", ] logger = log.get_logger(__name__) class StaticArrayGenericTypeBuilder(GenericTypeBuilder): @typing.override def call( self, args: Sequence[NodeBuilder], arg_kinds: list[mypy.nodes.ArgKind], arg_names: list[str | None], location: SourceLocation, ) -> InstanceBuilder: if not args: raise CodeError("empty arrays require a type annotation to be instantiated", location) element_type = expect.instance_builder(args[0], default=expect.default_raise).pytype array_size = len(args) typ = pytypes.GenericARC4StaticArrayType.parameterise( [element_type, pytypes.TypingLiteralType(value=array_size, source_location=None)], location, ) values = tuple(expect.argument_of_type_else_dummy(a, element_type).resolve() for a in args) wtype = typ.wtype assert isinstance(wtype, wtypes.ARC4StaticArray) return StaticArrayExpressionBuilder( NewArray(values=values, wtype=wtype, source_location=location), typ ) class StaticArrayTypeBuilder(BytesBackedTypeBuilder[pytypes.ArrayType]): def __init__(self, typ: pytypes.PyType, location: SourceLocation): assert isinstance(typ, pytypes.ArrayType) assert typ.generic == pytypes.GenericARC4StaticArrayType assert typ.size is not None self._size = typ.size super().__init__(typ, location) @typing.override def call( self, args: Sequence[NodeBuilder], arg_kinds: list[mypy.nodes.ArgKind], arg_names: list[str | None], location: SourceLocation, ) -> InstanceBuilder: typ = self.produces() n_args = expect.exactly_n_args_of_type_else_dummy(args, typ.items, location, self._size) wtype = typ.wtype assert isinstance(wtype, wtypes.ARC4StaticArray) return StaticArrayExpressionBuilder( NewArray( values=tuple(arg.resolve() for arg in n_args), wtype=wtype, source_location=location, ), typ, ) class StaticArrayExpressionBuilder(_ARC4ArrayExpressionBuilder, StaticSizedCollectionBuilder): def __init__(self, expr: Expression, typ: pytypes.PyType): assert isinstance(typ, pytypes.ArrayType) size = typ.size assert size is not None self._size = size super().__init__(typ, expr) @typing.override def length(self, location: SourceLocation) -> InstanceBuilder: return UInt64ExpressionBuilder(UInt64Constant(value=self._size, source_location=location)) @typing.override def bool_eval(self, location: SourceLocation, *, negate: bool = False) -> InstanceBuilder: return constant_bool_and_error(value=self._size > 0, location=location, negate=negate) @typing.override def iterate_static(self) -> Sequence[InstanceBuilder]: base = self.single_eval().resolve() return [ builder_for_instance( self.pytype.items, IndexExpression( base=base, index=UInt64Constant(value=idx, 
source_location=self.source_location), wtype=self.pytype.items_wtype, source_location=self.source_location, ), ) for idx in range(self._size) ]
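# Editor's note: a minimal sketch of the encoding assumed by the builders
# above. A static array of N fixed-size, non-bool elements is just those N
# encodings concatenated with no length prefix, which is why length() can
# return the compile-time constant self._size.


def _encode_static_array_sketch(elements: list[bytes]) -> bytes:
    assert len({len(e) for e in elements}) <= 1, "fixed-size elements assumed"
    return b"".join(elements)


assert _encode_static_array_sketch([b"\x01", b"\x02"]) == b"\x01\x02"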
algorandfoundation/puya
src/puyapy/awst_build/eb/arc4/static_array.py
Python
NOASSERTION
4,403
import typing from collections.abc import Sequence import mypy.nodes from puya import algo_constants, log from puya.awst import wtypes from puya.awst.nodes import ( ARC4Decode, ARC4Encode, ArrayConcat, BytesAugmentedAssignment, BytesBinaryOperator, Expression, Statement, StringConstant, ) from puya.parse import SourceLocation from puyapy.awst_build import pytypes from puyapy.awst_build.eb import _expect as expect from puyapy.awst_build.eb._base import NotIterableInstanceExpressionBuilder from puyapy.awst_build.eb._bytes_backed import BytesBackedInstanceExpressionBuilder from puyapy.awst_build.eb._utils import compare_expr_bytes, dummy_statement from puyapy.awst_build.eb.arc4._base import ARC4TypeBuilder, arc4_bool_bytes from puyapy.awst_build.eb.interface import ( BuilderBinaryOp, BuilderComparisonOp, InstanceBuilder, LiteralBuilder, NodeBuilder, ) from puyapy.awst_build.eb.string import StringExpressionBuilder __all__ = [ "ARC4StringTypeBuilder", "ARC4StringExpressionBuilder", ] logger = log.get_logger(__name__) class ARC4StringTypeBuilder(ARC4TypeBuilder): def __init__(self, location: SourceLocation): super().__init__(pytypes.ARC4StringType, location) @typing.override def try_convert_literal( self, literal: LiteralBuilder, location: SourceLocation ) -> InstanceBuilder | None: match literal.value: case str(literal_value): try: bytes_value = literal_value.encode("utf8") except UnicodeEncodeError as ex: logger.error( # noqa: TRY400 f"invalid UTF-8 string (encoding error: {ex})", location=literal.source_location, ) else: if len(bytes_value) > (algo_constants.MAX_BYTES_LENGTH - 2): logger.error( "encoded string exceeds max byte array length", location=literal.source_location, ) return _arc4_str_literal(literal_value, location) return None @typing.override def call( self, args: Sequence[NodeBuilder], arg_kinds: list[mypy.nodes.ArgKind], arg_names: list[str | None], location: SourceLocation, ) -> InstanceBuilder: arg = expect.at_most_one_arg(args, location) match arg: case InstanceBuilder(pytype=pytypes.StrLiteralType): return arg.resolve_literal(ARC4StringTypeBuilder(location)) case None: return _arc4_str_literal("", location) case _: arg = expect.argument_of_type_else_dummy(arg, pytypes.StringType) return _from_native(arg, location) class ARC4StringExpressionBuilder( NotIterableInstanceExpressionBuilder, BytesBackedInstanceExpressionBuilder ): def __init__(self, expr: Expression): super().__init__(pytypes.ARC4StringType, expr) @typing.override def augmented_assignment( self, op: BuilderBinaryOp, rhs: InstanceBuilder, location: SourceLocation ) -> Statement: if op != BuilderBinaryOp.add: logger.error(f"unsupported operator for type: {op.value!r}", location=location) return dummy_statement(location) rhs = rhs.resolve_literal(ARC4StringTypeBuilder(rhs.source_location)) if pytypes.StringType <= rhs.pytype: value = _from_native(rhs, rhs.source_location).resolve() else: value = expect.argument_of_type_else_dummy(rhs, self.pytype).resolve() return BytesAugmentedAssignment( target=self.resolve_lvalue(), op=BytesBinaryOperator.add, value=value, source_location=location, ) @typing.override def binary_op( self, other: InstanceBuilder, op: BuilderBinaryOp, location: SourceLocation, *, reverse: bool, ) -> InstanceBuilder: if op != BuilderBinaryOp.add: return NotImplemented other = other.resolve_literal(ARC4StringTypeBuilder(other.source_location)) if pytypes.ARC4StringType <= other.pytype: other_expr = other.resolve() elif pytypes.StringType <= other.pytype: other_expr = _from_native(other, 
other.source_location).resolve() else: return NotImplemented lhs = self.resolve() rhs = other_expr if reverse: (lhs, rhs) = (rhs, lhs) return ARC4StringExpressionBuilder( ArrayConcat( left=lhs, right=rhs, wtype=wtypes.arc4_string_alias, source_location=location, ) ) @typing.override def compare( self, other: InstanceBuilder, op: BuilderComparisonOp, location: SourceLocation ) -> InstanceBuilder: other = other.resolve_literal(ARC4StringTypeBuilder(other.source_location)) if pytypes.ARC4StringType <= other.pytype: lhs: InstanceBuilder = self elif pytypes.StringType <= other.pytype: # when comparing arc4 to native, easier to convert by stripping length prefix lhs = _string_to_native(self, self.source_location) else: return NotImplemented return compare_expr_bytes( lhs=lhs.resolve(), op=op, rhs=other.resolve(), source_location=location, ) @typing.override def bool_eval(self, location: SourceLocation, *, negate: bool = False) -> InstanceBuilder: return arc4_bool_bytes( self, false_bytes=b"\x00\x00", negate=negate, location=location, ) @typing.override def member_access(self, name: str, location: SourceLocation) -> NodeBuilder: match name: case "native": return _string_to_native(self, location) case _: return super().member_access(name, location) def _string_to_native( builder: InstanceBuilder, location: SourceLocation ) -> StringExpressionBuilder: assert pytypes.ARC4StringType <= builder.pytype return StringExpressionBuilder( ARC4Decode( value=builder.resolve(), wtype=pytypes.StringType.wtype, source_location=location, ) ) def _arc4_str_literal(value: str, location: SourceLocation) -> InstanceBuilder: return ARC4StringExpressionBuilder( StringConstant(value=value, source_location=location, wtype=wtypes.arc4_string_alias) ) def _from_native(eb: InstanceBuilder, location: SourceLocation) -> InstanceBuilder: assert pytypes.StringType <= eb.pytype return ARC4StringExpressionBuilder( ARC4Encode( value=eb.resolve(), wtype=wtypes.arc4_string_alias, source_location=location, ) )
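# Editor's note: a sketch of the representation the ARC4Encode/ARC4Decode
# nodes above convert between. An ARC4 String is a dynamic byte array of the
# UTF-8 encoding: a big-endian u16 length prefix, then the bytes. This is why
# bool_eval compares against b"\x00\x00" and why comparison with a native
# String strips the prefix first.


def _encode_arc4_string_sketch(value: str) -> bytes:
    raw = value.encode("utf8")
    return len(raw).to_bytes(2, "big") + raw


assert _encode_arc4_string_sketch("") == b"\x00\x00"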
algorandfoundation/puya
src/puyapy/awst_build/eb/arc4/string.py
Python
NOASSERTION
7,076
import typing from collections.abc import Sequence import mypy.nodes from puya import log from puya.awst import wtypes from puya.awst.nodes import Copy, Expression, FieldExpression, NewStruct from puya.parse import SourceLocation from puyapy.awst_build import pytypes from puyapy.awst_build.eb import _expect as expect from puyapy.awst_build.eb._base import FunctionBuilder, NotIterableInstanceExpressionBuilder from puyapy.awst_build.eb._bytes_backed import ( BytesBackedInstanceExpressionBuilder, BytesBackedTypeBuilder, ) from puyapy.awst_build.eb._utils import compare_bytes, constant_bool_and_error, dummy_value from puyapy.awst_build.eb.arc4._base import CopyBuilder from puyapy.awst_build.eb.factories import builder_for_instance from puyapy.awst_build.eb.interface import BuilderComparisonOp, InstanceBuilder, NodeBuilder from puyapy.awst_build.utils import get_arg_mapping logger = log.get_logger(__name__) class ARC4StructTypeBuilder(BytesBackedTypeBuilder[pytypes.StructType]): def __init__(self, typ: pytypes.PyType, location: SourceLocation): assert isinstance(typ, pytypes.StructType) assert pytypes.ARC4StructBaseType < typ super().__init__(typ, location) @typing.override def call( self, args: Sequence[NodeBuilder], arg_kinds: list[mypy.nodes.ArgKind], arg_names: list[str | None], location: SourceLocation, ) -> InstanceBuilder: pytype = self.produces() field_mapping, any_missing = get_arg_mapping( required_positional_names=list(pytype.fields), args=args, arg_names=arg_names, call_location=location, raise_on_missing=False, ) if any_missing: return dummy_value(pytype, location) values = { field_name: expect.argument_of_type_else_dummy( field_mapping[field_name], field_type ).resolve() for field_name, field_type in pytype.fields.items() } assert isinstance(pytype.wtype, wtypes.ARC4Struct) expr = NewStruct(wtype=pytype.wtype, values=values, source_location=location) return ARC4StructExpressionBuilder(expr, pytype) class ARC4StructExpressionBuilder( NotIterableInstanceExpressionBuilder[pytypes.StructType], BytesBackedInstanceExpressionBuilder[pytypes.StructType], ): def __init__(self, expr: Expression, typ: pytypes.PyType): assert isinstance(typ, pytypes.StructType) super().__init__(typ, expr) def member_access(self, name: str, location: SourceLocation) -> NodeBuilder: match name: case field_name if field := self.pytype.fields.get(field_name): result_expr = FieldExpression( base=self.resolve(), name=field_name, source_location=location, ) return builder_for_instance(field, result_expr) case "copy": return CopyBuilder(self.resolve(), location, self.pytype) case "_replace": return _Replace(self, self.pytype, location) case _: return super().member_access(name, location) def compare( self, other: InstanceBuilder, op: BuilderComparisonOp, location: SourceLocation ) -> InstanceBuilder: return compare_bytes(self=self, op=op, other=other, source_location=location) def bool_eval(self, location: SourceLocation, *, negate: bool = False) -> InstanceBuilder: return constant_bool_and_error(value=True, location=location, negate=negate) class _Replace(FunctionBuilder): def __init__( self, instance: ARC4StructExpressionBuilder, struct_type: pytypes.StructType, location: SourceLocation, ): super().__init__(location) self.instance = instance self.struct_type = struct_type @typing.override def call( self, args: Sequence[NodeBuilder], arg_kinds: list[mypy.nodes.ArgKind], arg_names: list[str | None], location: SourceLocation, ) -> InstanceBuilder: pytype = self.struct_type field_mapping, _ = get_arg_mapping( 
optional_kw_only=list(pytype.fields), args=args, arg_names=arg_names, call_location=location, raise_on_missing=False, ) base_expr = self.instance.single_eval().resolve() values = dict[str, Expression]() for field_name, field_pytype in pytype.fields.items(): new_value = field_mapping.get(field_name) if new_value is not None: item_builder = expect.argument_of_type_else_dummy(new_value, field_pytype) item = item_builder.resolve() else: field_wtype = field_pytype.checked_wtype(location) item = FieldExpression(base=base_expr, name=field_name, source_location=location) if not field_wtype.immutable: logger.error( f"mutable field {field_name!r} requires explicit copy", location=location ) # implicitly create a copy node so that there is only one error item = Copy(value=item, source_location=location) values[field_name] = item new_tuple = NewStruct(values=values, wtype=pytype.wtype, source_location=location) return ARC4StructExpressionBuilder(new_tuple, pytype)
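# Editor's note: a conceptual sketch of what the NewStruct nodes above build.
# An ARC4 struct encodes like an ARC4 tuple of its field types, so a struct
# of fixed-size fields is simply the field encodings concatenated in
# declaration order (dynamic fields would add head/tail offsets, omitted
# here). Hypothetical helper for illustration only.


def _encode_static_struct_sketch(fields: dict[str, bytes]) -> bytes:
    # dict preserves insertion order, standing in for declaration order
    return b"".join(fields.values())


assert _encode_static_struct_sketch({"x": b"\x01", "y": b"\x02"}) == b"\x01\x02"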
algorandfoundation/puya
src/puyapy/awst_build/eb/arc4/struct.py
Python
NOASSERTION
5,563
import typing from collections.abc import Sequence import mypy.nodes from puya import log from puya.awst.nodes import ARC4Decode, ARC4Encode, Expression, TupleItemExpression from puya.errors import CodeError from puya.parse import SourceLocation from puyapy.awst_build import pytypes from puyapy.awst_build.eb import _expect as expect from puyapy.awst_build.eb._base import GenericTypeBuilder from puyapy.awst_build.eb._bytes_backed import BytesBackedInstanceExpressionBuilder from puyapy.awst_build.eb._utils import compare_bytes, constant_bool_and_error, dummy_value from puyapy.awst_build.eb.arc4._base import ARC4TypeBuilder, CopyBuilder from puyapy.awst_build.eb.factories import builder_for_instance from puyapy.awst_build.eb.interface import ( BuilderComparisonOp, InstanceBuilder, LiteralBuilder, NodeBuilder, StaticSizedCollectionBuilder, ) from puyapy.awst_build.eb.tuple import TupleExpressionBuilder logger = log.get_logger(__name__) class ARC4TupleGenericTypeBuilder(GenericTypeBuilder): @typing.override def call( self, args: Sequence[NodeBuilder], arg_kinds: list[mypy.nodes.ArgKind], arg_names: list[str | None], location: SourceLocation, ) -> InstanceBuilder: arg = expect.exactly_one_arg(args, location, default=expect.default_raise) match arg: case InstanceBuilder(pytype=pytypes.TupleType(items=items)): typ = pytypes.GenericARC4TupleType.parameterise(items, location) return ARC4TupleExpressionBuilder( ARC4Encode(value=arg.resolve(), wtype=typ.wtype, source_location=location), typ ) case _: # don't know expected type, so raise expect.not_this_type(arg, default=expect.default_raise) class ARC4TupleTypeBuilder(ARC4TypeBuilder[pytypes.ARC4TupleType]): def __init__(self, typ: pytypes.PyType, location: SourceLocation): assert isinstance(typ, pytypes.ARC4TupleType) super().__init__(typ, location) @typing.override def call( self, args: Sequence[NodeBuilder], arg_kinds: list[mypy.nodes.ArgKind], arg_names: list[str | None], location: SourceLocation, ) -> InstanceBuilder: typ = self.produces() native_type = pytypes.GenericTupleType.parameterise(typ.items, location) arg = expect.exactly_one_arg( args, location, default=expect.default_dummy_value(native_type) ) wtype = typ.wtype if not wtype.can_encode_type(arg.pytype.checked_wtype(location)): arg = expect.not_this_type(arg, default=expect.default_dummy_value(native_type)) return ARC4TupleExpressionBuilder( ARC4Encode(value=arg.resolve(), wtype=wtype, source_location=location), typ ) class ARC4TupleExpressionBuilder( BytesBackedInstanceExpressionBuilder[pytypes.ARC4TupleType], StaticSizedCollectionBuilder ): def __init__(self, expr: Expression, typ: pytypes.PyType): assert isinstance(typ, pytypes.ARC4TupleType) super().__init__(typ, expr) @typing.override def index(self, index: InstanceBuilder, location: SourceLocation) -> InstanceBuilder: match index: case LiteralBuilder(value=int(index_value)): pass case InstanceBuilder(pytype=pytypes.IntLiteralType): raise CodeError("tuple index must be a simple int literal", index.source_location) case other: expect.not_this_type(other, default=expect.default_raise) try: item_typ = self.pytype.items[index_value] except IndexError: raise CodeError("index out of bounds", index.source_location) from None return builder_for_instance( item_typ, TupleItemExpression( base=self.resolve(), index=index_value, source_location=location, ), ) @typing.override def bool_eval(self, location: SourceLocation, *, negate: bool = False) -> InstanceBuilder: return constant_bool_and_error(value=True, location=location, negate=negate) 
@typing.override def member_access(self, name: str, location: SourceLocation) -> NodeBuilder: match name: case "native": native_pytype = pytypes.GenericTupleType.parameterise(self.pytype.items, location) native_wtype = native_pytype.checked_wtype(location) result_expr: Expression = ARC4Decode( value=self.resolve(), wtype=native_wtype, source_location=location, ) return TupleExpressionBuilder(result_expr, native_pytype) case "copy": return CopyBuilder(self.resolve(), location, self.pytype) case _: return super().member_access(name, location) @typing.override def compare( self, other: InstanceBuilder, op: BuilderComparisonOp, location: SourceLocation ) -> InstanceBuilder: return compare_bytes(self=self, op=op, other=other, source_location=location) @typing.override def contains(self, item: InstanceBuilder, location: SourceLocation) -> InstanceBuilder: logger.error( "item containment with ARC4 tuples is currently unsupported", location=location ) return dummy_value(pytypes.BoolType, location) @typing.override def iterate(self) -> typing.Never: # could only support for homogenous types anyway, in which case use a StaticArray? raise CodeError("iterating ARC4 tuples is currently unsupported", self.source_location) @typing.override def iterate_static(self) -> Sequence[InstanceBuilder]: base = self.single_eval().resolve() return [ builder_for_instance( item_type, TupleItemExpression(base=base, index=idx, source_location=self.source_location), ) for idx, item_type in enumerate(self.pytype.items) ] @typing.override def iterable_item_type(self) -> typing.Never: self.iterate() @typing.override def slice_index( self, begin_index: InstanceBuilder | None, end_index: InstanceBuilder | None, stride: InstanceBuilder | None, location: SourceLocation, ) -> InstanceBuilder: raise CodeError("slicing ARC4 tuples is currently unsupported", location)
algorandfoundation/puya
src/puyapy/awst_build/eb/arc4/tuple.py
Python
NOASSERTION
6,588
import decimal
import typing
from collections.abc import Sequence

import mypy.nodes

from puya import log
from puya.awst import wtypes
from puya.awst.nodes import DecimalConstant, Expression
from puya.parse import SourceLocation
from puyapy.awst_build import pytypes
from puyapy.awst_build.eb import _expect as expect
from puyapy.awst_build.eb._base import NotIterableInstanceExpressionBuilder
from puyapy.awst_build.eb._bytes_backed import BytesBackedInstanceExpressionBuilder
from puyapy.awst_build.eb._utils import compare_bytes
from puyapy.awst_build.eb.arc4._base import ARC4TypeBuilder, arc4_bool_bytes
from puyapy.awst_build.eb.interface import (
    BuilderComparisonOp,
    InstanceBuilder,
    LiteralBuilder,
    NodeBuilder,
)

__all__ = [
    "UFixedNxMTypeBuilder",
    "UFixedNxMExpressionBuilder",
]

logger = log.get_logger(__name__)


class UFixedNxMTypeBuilder(ARC4TypeBuilder):
    @typing.override
    def try_convert_literal(
        self, literal: LiteralBuilder, location: SourceLocation
    ) -> InstanceBuilder | None:
        match literal.value:
            case str(literal_value):
                result = self._str_to_decimal_constant(
                    literal_value, error_location=literal.source_location, location=location
                )
                return UFixedNxMExpressionBuilder(result, self.produces())
        return None

    @typing.override
    def call(
        self,
        args: Sequence[NodeBuilder],
        arg_kinds: list[mypy.nodes.ArgKind],
        arg_names: list[str | None],
        location: SourceLocation,
    ) -> InstanceBuilder:
        arg = expect.at_most_one_arg(args, location)
        if arg is None:
            result = self._str_to_decimal_constant("0.0", location=location)
            return UFixedNxMExpressionBuilder(result, self.produces())
        arg = expect.argument_of_type(arg, pytypes.StrLiteralType, default=expect.default_raise)
        return arg.resolve_literal(UFixedNxMTypeBuilder(self.produces(), location))

    def _str_to_decimal_constant(
        self,
        literal_value: str,
        *,
        location: SourceLocation,
        error_location: SourceLocation | None = None,
    ) -> DecimalConstant:
        # fall back to the call location when no more specific location is given
        error_location = error_location or location
        fixed_wtype = self.produces().wtype
        assert isinstance(fixed_wtype, wtypes.ARC4UFixedNxM)
        with decimal.localcontext(
            decimal.Context(
                prec=160,
                traps=[
                    decimal.Rounded,
                    decimal.InvalidOperation,
                    decimal.Overflow,
                    decimal.DivisionByZero,
                ],
            )
        ):
            try:
                d = decimal.Decimal(literal_value)
            except ArithmeticError:
                logger.error("invalid decimal literal", location=error_location)  # noqa: TRY400
                d = decimal.Decimal()
            try:
                q = d.quantize(decimal.Decimal(f"1e-{fixed_wtype.m}"))
            except ArithmeticError:
                logger.error(  # noqa: TRY400
                    "invalid decimal constant (wrong precision)", location=error_location
                )
                q = decimal.Decimal("0." + "0" * fixed_wtype.m)
        sign, digits, exponent = q.as_tuple()
        if sign != 0:  # is negative
            logger.error("invalid decimal constant (value is negative)", location=error_location)
        if not isinstance(exponent, int):  # is infinite
            logger.error("invalid decimal constant (value is infinite)", location=error_location)
        adjusted_int = int("".join(map(str, digits)))
        if adjusted_int.bit_length() > fixed_wtype.n:
            logger.error("invalid decimal constant (too many bits)", location=error_location)
        result = DecimalConstant(value=q, wtype=fixed_wtype, source_location=location)
        return result


class UFixedNxMExpressionBuilder(
    NotIterableInstanceExpressionBuilder[pytypes.ARC4UFixedNxMType],
    BytesBackedInstanceExpressionBuilder[pytypes.ARC4UFixedNxMType],
):
    def __init__(self, expr: Expression, typ: pytypes.PyType):
        assert isinstance(typ, pytypes.ARC4UFixedNxMType)
        assert typ.generic in (
            pytypes.GenericARC4UFixedNxMType,
            pytypes.GenericARC4BigUFixedNxMType,
        )
        super().__init__(typ, expr)

    @typing.override
    def bool_eval(self, location: SourceLocation, *, negate: bool = False) -> InstanceBuilder:
        return arc4_bool_bytes(
            self,
            false_bytes=b"\x00" * (self.pytype.bits // 8),
            negate=negate,
            location=location,
        )

    @typing.override
    def compare(
        self, other: InstanceBuilder, op: BuilderComparisonOp, location: SourceLocation
    ) -> InstanceBuilder:
        other = other.resolve_literal(UFixedNxMTypeBuilder(self.pytype, other.source_location))
        return compare_bytes(op=op, self=self, other=other, source_location=location)
algorandfoundation/puya
src/puyapy/awst_build/eb/arc4/ufixed.py
Python
NOASSERTION
5,138
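# --- Illustrative sketch (not part of the dataset row above) ---
# A minimal standalone demonstration of the decimal validation technique used in
# ufixed.py: construct a Decimal under a strict context that traps rounding, then
# quantize to exactly M fractional digits so any precision loss raises instead of
# silently rounding. The names M and N below are assumptions for illustration only.
import decimal

M = 2  # assumed fractional digits of a hypothetical UFixedNxM
N = 16  # assumed total bit width

with decimal.localcontext(
    decimal.Context(prec=160, traps=[decimal.Rounded, decimal.InvalidOperation])
):
    q = decimal.Decimal("12.34").quantize(decimal.Decimal(f"1e-{M}"))  # ok: exactly 2 dp
    sign, digits, exponent = q.as_tuple()
    assert sign == 0 and int("".join(map(str, digits))).bit_length() <= N
    try:
        decimal.Decimal("12.345").quantize(decimal.Decimal(f"1e-{M}"))
    except decimal.Rounded:
        print("rejected: wrong precision")  # mirrors the 'wrong precision' error above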
import typing
from collections.abc import Sequence

import mypy.nodes

from puya import log
from puya.awst import wtypes
from puya.awst.nodes import (
    ARC4Decode,
    ARC4Encode,
    Expression,
    IntegerConstant,
    NumericComparison,
    NumericComparisonExpression,
    ReinterpretCast,
)
from puya.parse import SourceLocation
from puyapy.awst_build import intrinsic_factory, pytypes
from puyapy.awst_build.eb import _expect as expect
from puyapy.awst_build.eb._base import NotIterableInstanceExpressionBuilder
from puyapy.awst_build.eb._bytes_backed import BytesBackedInstanceExpressionBuilder
from puyapy.awst_build.eb.arc4._base import ARC4TypeBuilder, arc4_bool_bytes
from puyapy.awst_build.eb.bool import BoolExpressionBuilder
from puyapy.awst_build.eb.factories import builder_for_instance
from puyapy.awst_build.eb.interface import (
    BuilderComparisonOp,
    InstanceBuilder,
    LiteralBuilder,
    NodeBuilder,
)

__all__ = [
    "UIntNTypeBuilder",
    "UIntNExpressionBuilder",
]

logger = log.get_logger(__name__)


class UIntNTypeBuilder(ARC4TypeBuilder[pytypes.ARC4UIntNType]):
    def __init__(self, pytype: pytypes.PyType, location: SourceLocation):
        assert isinstance(pytype, pytypes.ARC4UIntNType)
        super().__init__(pytype, location)

    @typing.override
    def try_convert_literal(
        self, literal: LiteralBuilder, location: SourceLocation
    ) -> InstanceBuilder | None:
        pytype = self.produces()
        match literal.value:
            case int(int_value):
                if int_value < 0 or int_value.bit_length() > pytype.bits:
                    logger.error(f"invalid {pytype} value", location=literal.source_location)
                # take int() of the value since it could match a bool also
                expr = IntegerConstant(
                    value=int(int_value), wtype=pytype.wtype, source_location=location
                )
                return UIntNExpressionBuilder(expr, pytype)
        return None

    @typing.override
    def call(
        self,
        args: Sequence[NodeBuilder],
        arg_kinds: list[mypy.nodes.ArgKind],
        arg_names: list[str | None],
        location: SourceLocation,
    ) -> InstanceBuilder:
        arg = expect.at_most_one_arg(args, location)
        typ = self.produces()
        wtype = typ.wtype
        match arg:
            case InstanceBuilder(pytype=pytypes.IntLiteralType):
                return arg.resolve_literal(UIntNTypeBuilder(typ, location))
            case None:
                expr: Expression = IntegerConstant(value=0, wtype=wtype, source_location=location)
            case _:
                encodeable = expect.argument_of_type_else_dummy(
                    arg, pytypes.UInt64Type, pytypes.BigUIntType, pytypes.BoolType
                )
                expr = ARC4Encode(
                    value=encodeable.resolve(), wtype=wtype, source_location=location
                )
        return UIntNExpressionBuilder(expr, typ)


class UIntNExpressionBuilder(
    NotIterableInstanceExpressionBuilder[pytypes.ARC4UIntNType],
    BytesBackedInstanceExpressionBuilder[pytypes.ARC4UIntNType],
):
    def __init__(self, expr: Expression, typ: pytypes.PyType):
        assert isinstance(typ, pytypes.ARC4UIntNType)
        super().__init__(typ, expr)

    @typing.override
    def member_access(self, name: str, location: SourceLocation) -> NodeBuilder:
        match name:
            case "native":
                result_expr = ARC4Decode(
                    value=self.resolve(),
                    wtype=self.pytype.native_type.wtype,
                    source_location=location,
                )
                return builder_for_instance(self.pytype.native_type, result_expr)
            case _:
                return super().member_access(name, location)

    @typing.override
    def bool_eval(self, location: SourceLocation, *, negate: bool = False) -> InstanceBuilder:
        return arc4_bool_bytes(
            self,
            false_bytes=b"\x00" * (self.pytype.bits // 8),
            location=location,
            negate=negate,
        )

    @typing.override
    def compare(
        self, other: InstanceBuilder, op: BuilderComparisonOp, location: SourceLocation
    ) -> InstanceBuilder:
        other = other.resolve_literal(UIntNTypeBuilder(self.pytype, other.source_location))
        if pytypes.BigUIntType <= other.pytype:
            other_expr = other.resolve()
        elif other.pytype.is_type_or_subtype(pytypes.BoolType, pytypes.UInt64Type):
            other_expr = intrinsic_factory.itob_as(other.resolve(), wtypes.biguint_wtype, location)
        elif isinstance(other.pytype, pytypes.ARC4UIntNType):
            other_expr = ReinterpretCast(
                expr=other.resolve(),
                wtype=wtypes.biguint_wtype,
                source_location=other.source_location,
            )
        else:
            return NotImplemented
        cmp_expr = NumericComparisonExpression(
            operator=NumericComparison(op.value),
            lhs=ReinterpretCast(
                expr=self.resolve(),
                wtype=wtypes.biguint_wtype,
                source_location=self.source_location,
            ),
            rhs=other_expr,
            source_location=location,
        )
        return BoolExpressionBuilder(cmp_expr)
algorandfoundation/puya
src/puyapy/awst_build/eb/arc4/uint.py
Python
NOASSERTION
5,340
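# --- Illustrative sketch (not part of the dataset row above) ---
# uint.py compares ARC-4 unsigned ints by widening both operands to the AVM's
# big-integer (big-endian bytes) representation before comparing. As a plain-Python
# analogue: lexicographic comparison of equal-length big-endian encodings agrees
# with numeric comparison, which is why a byte-level compare is sound after widening.
def be(value: int, width: int = 64) -> bytes:
    # big-endian, zero-padded encoding (analogous to the AVM's itob for width=64)
    return value.to_bytes(width // 8, "big")

for lhs, rhs in [(1, 2), (255, 256), (2**32, 7)]:
    assert (be(lhs) < be(rhs)) == (lhs < rhs)
    assert (be(lhs) == be(rhs)) == (lhs == rhs)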
import typing
from collections.abc import Sequence

import mypy.nodes

from puya.awst import wtypes
from puya.awst.nodes import Expression
from puya.parse import SourceLocation
from puyapy.awst_build import pytypes
from puyapy.awst_build.eb._base import GenericTypeBuilder
from puyapy.awst_build.eb.interface import InstanceBuilder, NodeBuilder, TypeBuilder


class ArrayGenericTypeBuilder(GenericTypeBuilder):
    @typing.override
    def call(
        self,
        args: Sequence[NodeBuilder],
        arg_kinds: list[mypy.nodes.ArgKind],
        arg_names: list[str | None],
        location: SourceLocation,
    ) -> InstanceBuilder:
        raise NotImplementedError


class ArrayTypeBuilder(TypeBuilder[pytypes.ArrayType]):
    def __init__(self, typ: pytypes.PyType, location: SourceLocation):
        assert isinstance(typ, pytypes.ArrayType)
        assert typ.generic == pytypes.GenericArrayType
        wtype = typ.wtype
        assert isinstance(wtype, wtypes.WArray)
        self._wtype = wtype
        super().__init__(typ, location)

    @typing.override
    def call(
        self,
        args: Sequence[NodeBuilder],
        arg_kinds: list[mypy.nodes.ArgKind],
        arg_names: list[str | None],
        location: SourceLocation,
    ) -> InstanceBuilder:
        raise NotImplementedError


def ArrayExpressionBuilder(expr: Expression, typ: pytypes.PyType) -> typing.Never:  # noqa: N802
    raise NotImplementedError
algorandfoundation/puya
src/puyapy/awst_build/eb/array.py
Python
NOASSERTION
1,441
import typing
from collections.abc import Sequence

import attrs
import mypy.nodes

from puya import algo_constants, log
from puya.awst import wtypes
from puya.awst.nodes import (
    BigUIntAugmentedAssignment,
    BigUIntBinaryOperation,
    BigUIntBinaryOperator,
    BigUIntConstant,
    Expression,
    NumericComparison,
    NumericComparisonExpression,
    Statement,
)
from puya.parse import SourceLocation
from puyapy.awst_build import intrinsic_factory, pytypes
from puyapy.awst_build.eb import _expect as expect
from puyapy.awst_build.eb._base import NotIterableInstanceExpressionBuilder
from puyapy.awst_build.eb._bytes_backed import (
    BytesBackedInstanceExpressionBuilder,
    BytesBackedTypeBuilder,
)
from puyapy.awst_build.eb._utils import dummy_statement
from puyapy.awst_build.eb.bool import BoolExpressionBuilder
from puyapy.awst_build.eb.interface import (
    BuilderBinaryOp,
    BuilderComparisonOp,
    BuilderUnaryOp,
    InstanceBuilder,
    LiteralBuilder,
    NodeBuilder,
)

logger = log.get_logger(__name__)


class BigUIntTypeBuilder(BytesBackedTypeBuilder):
    def __init__(self, location: SourceLocation):
        super().__init__(pytypes.BigUIntType, location)

    @typing.override
    def try_convert_literal(
        self, literal: LiteralBuilder, location: SourceLocation
    ) -> InstanceBuilder | None:
        match literal.value:
            case int(int_value):
                pytype = self.produces()
                if int_value < 0 or int_value.bit_length() > algo_constants.MAX_BIGUINT_BITS:
                    logger.error(f"invalid {pytype} value", location=literal.source_location)
                expr = BigUIntConstant(value=int(int_value), source_location=location)
                return BigUIntExpressionBuilder(expr)
        return None

    @typing.override
    def call(
        self,
        args: Sequence[NodeBuilder],
        arg_kinds: list[mypy.nodes.ArgKind],
        arg_names: list[str | None],
        location: SourceLocation,
    ) -> InstanceBuilder:
        arg = expect.at_most_one_arg(args, location)
        match arg:
            case InstanceBuilder(pytype=pytypes.IntLiteralType):
                return arg.resolve_literal(converter=BigUIntTypeBuilder(location))
            case None:
                value: Expression = BigUIntConstant(value=0, source_location=location)
            case _:
                arg = expect.argument_of_type_else_dummy(arg, pytypes.UInt64Type)
                value = _uint64_to_biguint(arg, location)
        return BigUIntExpressionBuilder(value)


class BigUIntExpressionBuilder(
    NotIterableInstanceExpressionBuilder, BytesBackedInstanceExpressionBuilder
):
    def __init__(self, expr: Expression):
        super().__init__(pytypes.BigUIntType, expr)

    @typing.override
    def bool_eval(self, location: SourceLocation, *, negate: bool = False) -> InstanceBuilder:
        cmp_expr = NumericComparisonExpression(
            lhs=self.resolve(),
            operator=NumericComparison.eq if negate else NumericComparison.ne,
            rhs=BigUIntConstant(value=0, source_location=location),
            source_location=location,
        )
        return BoolExpressionBuilder(cmp_expr)

    @typing.override
    def unary_op(self, op: BuilderUnaryOp, location: SourceLocation) -> InstanceBuilder:
        if op == BuilderUnaryOp.positive:
            # unary + is allowed, but for the current types it has no real impact
            # so just expand the existing expression to include the unary operator
            return BigUIntExpressionBuilder(attrs.evolve(self.resolve(), source_location=location))
        return super().unary_op(op, location)

    @typing.override
    def compare(
        self, other: InstanceBuilder, op: BuilderComparisonOp, location: SourceLocation
    ) -> InstanceBuilder:
        other = other.resolve_literal(converter=BigUIntTypeBuilder(other.source_location))
        if pytypes.BigUIntType <= other.pytype:
            other_expr = other.resolve()
        elif pytypes.UInt64Type <= other.pytype:
            other_expr = _uint64_to_biguint(other, location)
        else:
            return NotImplemented
        cmp_expr = NumericComparisonExpression(
            source_location=location,
            lhs=self.resolve(),
            operator=NumericComparison(op.value),
            rhs=other_expr,
        )
        return BoolExpressionBuilder(cmp_expr)

    @typing.override
    def binary_op(
        self,
        other: InstanceBuilder,
        op: BuilderBinaryOp,
        location: SourceLocation,
        *,
        reverse: bool,
    ) -> InstanceBuilder:
        biguint_op = _translate_biguint_math_operator(op, location)
        if biguint_op is None:
            return NotImplemented
        other = other.resolve_literal(converter=BigUIntTypeBuilder(other.source_location))
        if pytypes.BigUIntType <= other.pytype:
            other_expr = other.resolve()
        elif pytypes.UInt64Type <= other.pytype:
            other_expr = _uint64_to_biguint(other, location)
        else:
            return NotImplemented
        lhs = self.resolve()
        rhs = other_expr
        if reverse:
            (lhs, rhs) = (rhs, lhs)
        bin_op_expr = BigUIntBinaryOperation(
            source_location=location, left=lhs, op=biguint_op, right=rhs
        )
        return BigUIntExpressionBuilder(bin_op_expr)

    @typing.override
    def augmented_assignment(
        self, op: BuilderBinaryOp, rhs: InstanceBuilder, location: SourceLocation
    ) -> Statement:
        biguint_op = _translate_biguint_math_operator(op, location)
        if biguint_op is None:
            logger.error(f"unsupported operator for type: {op.value!r}", location=location)
            return dummy_statement(location)
        if pytypes.UInt64Type <= rhs.pytype:
            value = _uint64_to_biguint(rhs, location)
        else:
            value = expect.argument_of_type_else_dummy(
                rhs, self.pytype, resolve_literal=True
            ).resolve()
        target = self.resolve_lvalue()
        return BigUIntAugmentedAssignment(
            source_location=location,
            target=target,
            value=value,
            op=biguint_op,
        )


def _translate_biguint_math_operator(
    operator: BuilderBinaryOp, loc: SourceLocation
) -> BigUIntBinaryOperator | None:
    if operator is BuilderBinaryOp.div:
        logger.error(
            (
                "To maintain semantic compatibility with Python, "
                "only the truncating division operator (//) is supported "
            ),
            location=loc,
        )
        # continue traversing code to generate any further errors
        operator = BuilderBinaryOp.floor_div
    try:
        return BigUIntBinaryOperator(operator.value)
    except ValueError:
        return None


def _uint64_to_biguint(arg_in: InstanceBuilder, location: SourceLocation) -> Expression:
    return intrinsic_factory.itob_as(arg_in.resolve(), wtypes.biguint_wtype, location)
algorandfoundation/puya
src/puyapy/awst_build/eb/biguint.py
Python
NOASSERTION
7,032
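# --- Illustrative sketch (not part of the dataset row above) ---
# biguint.py maps front-end operators onto backend BigUInt operators via their
# shared enum values, returning None for unsupported ones so callers can log an
# error and keep traversing. A minimal sketch of that pattern, using hypothetical
# enums (FrontOp/BigUIntOp are illustration names, not the real classes):
import enum

class FrontOp(enum.Enum):
    add = "+"
    div = "/"
    floor_div = "//"

class BigUIntOp(enum.Enum):
    add = "+"
    floor_div = "//"

def translate(op: FrontOp) -> BigUIntOp | None:
    try:
        return BigUIntOp(op.value)  # bridge via the shared string value
    except ValueError:
        return None  # unsupported; caller logs an error and continues

assert translate(FrontOp.add) is BigUIntOp.add
assert translate(FrontOp.div) is None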
import typing

from puya.awst.nodes import (
    BinaryBooleanOperator,
    BooleanBinaryOperation,
    ConditionalExpression,
    Expression,
    Not,
)
from puya.errors import CodeError
from puya.parse import SourceLocation
from puyapy.awst_build import pytypes
from puyapy.awst_build.eb.bool import BoolExpressionBuilder
from puyapy.awst_build.eb.factories import builder_for_instance
from puyapy.awst_build.eb.interface import (
    BuilderBinaryOp,
    BuilderComparisonOp,
    BuilderUnaryOp,
    InstanceBuilder,
    NodeBuilder,
    TypeBuilder,
)
from puyapy.awst_build.utils import determine_base_type


class BinaryBoolOpBuilder(InstanceBuilder):
    """
    This builder works to defer the evaluation of a boolean binary op (ie and/or),
    because in some cases (unions, python literals) we can only successfully evaluate
    in certain contexts.

    For example:

        a = Bytes(...)
        b = UInt64(...)
        if a or b:  # this is fine, even though `a or b` produces `Bytes | UInt64`
            ...
        c = a or b  # compiler error

    You wouldn't be able to do anything with c, since in general we can't know at
    compile time what the type of c is, and the AVM doesn't provide any type
    introspection. Even if there was an op that said whether a stack item or a
    scratch slot etc held a bytes[] or a uint64, there are differences between
    logical types and physical types that need to be accounted for - for example,
    biguint is a bytes[] but we would need to use a different equality op b==
    instead of ==
    """

    def __init__(
        self,
        left: InstanceBuilder,
        right: InstanceBuilder,
        op: BinaryBooleanOperator,
        location: SourceLocation,
        *,
        result_type: pytypes.PyType | None = None,
    ):
        super().__init__(location)
        if result_type is None:
            # if either left or right is already a union, just produce another union
            if isinstance(left.pytype, pytypes.UnionType) or isinstance(
                right.pytype, pytypes.UnionType
            ):
                # note if left and/or right are unions this constructor will expand them for us
                result_type = pytypes.UnionType([left.pytype, right.pytype], location)
            else:
                # otherwise, left and right are both non-union types, so try and
                # compute a common base type, falling back to a union if not possible
                result_type = determine_base_type(left.pytype, right.pytype, location=location)
        self._result_type = result_type
        self._left = left
        self._right = right
        self._op = op

    @typing.override
    @property
    def pytype(self) -> pytypes.PyType:
        return self._result_type

    @typing.override
    def single_eval(self) -> InstanceBuilder:
        left = self._left.single_eval()
        right = self._right.single_eval()
        return self._evolve_builders(left, right, recalculate_type=False)

    @typing.override
    def resolve_literal(self, converter: TypeBuilder) -> InstanceBuilder:
        left = self._left.resolve_literal(converter)
        right = self._right.resolve_literal(converter)
        return self._evolve_builders(left, right)

    @typing.override
    def try_resolve_literal(self, converter: TypeBuilder) -> InstanceBuilder | None:
        left = self._left.try_resolve_literal(converter)
        right = self._right.try_resolve_literal(converter)
        if left is None or right is None:
            return None
        return self._evolve_builders(left, right)

    @typing.override
    def bool_eval(self, location: SourceLocation, *, negate: bool = False) -> InstanceBuilder:
        bool_left = self._left.bool_eval(self._left.source_location).resolve()
        bool_right = self._right.bool_eval(self._right.source_location).resolve()
        result_expr: Expression = BooleanBinaryOperation(
            left=bool_left, op=self._op, right=bool_right, source_location=location
        )
        if negate:
            result_expr = Not(expr=result_expr, source_location=location)
        return BoolExpressionBuilder(result_expr)

    @typing.override
    def bool_binary_op(
        self, other: InstanceBuilder, op: BinaryBooleanOperator, location: SourceLocation
    ) -> InstanceBuilder:
        return BinaryBoolOpBuilder(left=self, right=other, op=op, location=location)

    @typing.override
    def resolve(self) -> Expression:
        if isinstance(self.pytype, pytypes.UnionType):
            raise CodeError(
                "expression would produce a union type,"
                " which isn't supported unless evaluating a boolean condition",
                self.source_location,
            )
        result_wtype = self.pytype.checked_wtype(self.source_location)
        # (left:uint64 and right:uint64) => left_cache if not bool(left_cache := left) else right
        # (left:uint64 or right:uint64) => left_cache if bool(left_cache := left) else right
        left_cache = self._left.single_eval()
        condition = left_cache.bool_eval(
            self.source_location, negate=self._op is BinaryBooleanOperator.and_
        )
        expr_result = ConditionalExpression(
            condition=condition.resolve(),
            true_expr=left_cache.resolve(),
            false_expr=self._right.resolve(),
            wtype=result_wtype,
            source_location=self.source_location,
        )
        return expr_result

    def _evolve_builders(
        self, left: InstanceBuilder, right: InstanceBuilder, *, recalculate_type: bool = True
    ) -> InstanceBuilder:
        return BinaryBoolOpBuilder(
            left=left,
            right=right,
            op=self._op,
            location=self.source_location,
            result_type=None if recalculate_type else self.pytype,
        )

    # region Invalid Python syntax

    @typing.override
    def resolve_lvalue(self) -> typing.Never:
        raise CodeError(
            # message copied from Python
            "cannot assign to expression here. Maybe you meant '==' instead of '='?",
            self.source_location,
        )

    @typing.override
    def delete(self, location: SourceLocation) -> typing.Never:
        raise CodeError(
            # message copied from Python
            "cannot delete expression",
            location,
        )

    @typing.override
    def augmented_assignment(
        self, op: BuilderBinaryOp, right: InstanceBuilder, location: SourceLocation
    ) -> typing.Never:
        raise CodeError(
            # copied (and trimmed) from Python
            "illegal expression for augmented assignment",
            location,
        )

    # endregion

    # region Forward to resolved builder

    def _resolve_builder(self) -> InstanceBuilder:
        expr = self.resolve()
        return builder_for_instance(self.pytype, expr)

    @typing.override
    def unary_op(self, op: BuilderUnaryOp, location: SourceLocation) -> InstanceBuilder:
        return self._resolve_builder().unary_op(op, location)

    @typing.override
    def compare(
        self, other: InstanceBuilder, op: BuilderComparisonOp, location: SourceLocation
    ) -> InstanceBuilder:
        return self._resolve_builder().compare(other, op, location)

    @typing.override
    def binary_op(
        self,
        other: InstanceBuilder,
        op: BuilderBinaryOp,
        location: SourceLocation,
        *,
        reverse: bool,
    ) -> InstanceBuilder:
        return self._resolve_builder().binary_op(other, op, location, reverse=reverse)

    @typing.override
    def contains(self, item: InstanceBuilder, location: SourceLocation) -> InstanceBuilder:
        return self._resolve_builder().contains(item, location)

    @typing.override
    def iterate(self) -> Expression:
        return self._resolve_builder().iterate()

    @typing.override
    def iterable_item_type(self) -> pytypes.PyType:
        return self._resolve_builder().iterable_item_type()

    @typing.override
    def index(self, index: InstanceBuilder, location: SourceLocation) -> InstanceBuilder:
        return self._resolve_builder().index(index, location)

    @typing.override
    def slice_index(
        self,
        begin_index: InstanceBuilder | None,
        end_index: InstanceBuilder | None,
        stride: InstanceBuilder | None,
        location: SourceLocation,
    ) -> InstanceBuilder:
        return self._resolve_builder().slice_index(begin_index, end_index, stride, location)

    @typing.override
    def to_bytes(self, location: SourceLocation) -> Expression:
        return self._resolve_builder().to_bytes(location)

    @typing.override
    def member_access(self, name: str, location: SourceLocation) -> NodeBuilder:
        return self._resolve_builder().member_access(name, location)

    # endregion
algorandfoundation/puya
src/puyapy/awst_build/eb/binary_bool_op.py
Python
NOASSERTION
8,845
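# --- Illustrative sketch (not part of the dataset row above) ---
# BinaryBoolOpBuilder.resolve() lowers `and`/`or` to a conditional expression that
# evaluates the left operand exactly once, mirroring Python's value semantics:
#   a or b  ==  a if bool(a) else b
#   a and b ==  a if not bool(a) else b   (i.e. the condition is negated for `and`)
for a in (0, 7):
    for b in (0, 9):
        assert (a or b) == (a if bool(a) else b)
        assert (a and b) == (a if not bool(a) else b)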
import typing
from collections.abc import Sequence

import mypy.nodes

from puya import log
from puya.awst.nodes import (
    BinaryBooleanOperator,
    BoolConstant,
    BooleanBinaryOperation,
    Expression,
    Not,
    NumericComparison,
    NumericComparisonExpression,
)
from puya.parse import SourceLocation
from puyapy.awst_build import intrinsic_factory, pytypes
from puyapy.awst_build.eb import _expect as expect
from puyapy.awst_build.eb._base import NotIterableInstanceExpressionBuilder
from puyapy.awst_build.eb.interface import (
    BuilderComparisonOp,
    InstanceBuilder,
    LiteralBuilder,
    NodeBuilder,
    TypeBuilder,
)

logger = log.get_logger(__name__)


class BoolTypeBuilder(TypeBuilder):
    def __init__(self, location: SourceLocation):
        super().__init__(pytypes.BoolType, location)

    @typing.override
    def try_convert_literal(
        self, literal: LiteralBuilder, location: SourceLocation
    ) -> InstanceBuilder | None:
        match literal.value:
            case bool(literal_value):
                expr = BoolConstant(value=literal_value, source_location=location)
                return BoolExpressionBuilder(expr)
        return None

    @typing.override
    def call(
        self,
        args: Sequence[NodeBuilder],
        arg_kinds: list[mypy.nodes.ArgKind],
        arg_names: list[str | None],
        location: SourceLocation,
    ) -> InstanceBuilder:
        arg = expect.at_most_one_arg(args, location)
        match arg:
            case None:
                false = BoolConstant(value=False, source_location=location)
                return BoolExpressionBuilder(false)
            case InstanceBuilder(pytype=pytypes.BoolType):
                return arg
            case _:
                return arg.bool_eval(location)


class BoolExpressionBuilder(NotIterableInstanceExpressionBuilder):
    def __init__(self, expr: Expression):
        super().__init__(pytypes.BoolType, expr)

    @typing.override
    def to_bytes(self, location: SourceLocation) -> Expression:
        return intrinsic_factory.itob(self.resolve(), location)

    @typing.override
    def bool_eval(self, location: SourceLocation, *, negate: bool = False) -> InstanceBuilder:
        if not negate:
            return self
        return BoolExpressionBuilder(Not(location, self.resolve()))

    @typing.override
    def compare(
        self, other: InstanceBuilder, op: BuilderComparisonOp, location: SourceLocation
    ) -> InstanceBuilder:
        if other.pytype != pytypes.BoolType:
            return NotImplemented
        cmp_expr = NumericComparisonExpression(
            source_location=location,
            lhs=self.resolve(),
            operator=NumericComparison(op.value),
            rhs=other.resolve(),
        )
        return BoolExpressionBuilder(cmp_expr)

    @typing.override
    def bool_binary_op(
        self, other: InstanceBuilder, op: BinaryBooleanOperator, location: SourceLocation
    ) -> InstanceBuilder:
        if other.pytype != pytypes.BoolType:
            return super().bool_binary_op(other, op, location)
        result = BooleanBinaryOperation(
            left=self.resolve(),
            op=op,
            right=other.resolve(),
            source_location=location,
        )
        return BoolExpressionBuilder(result)
algorandfoundation/puya
src/puyapy/awst_build/eb/bool.py
Python
NOASSERTION
3,320
import base64
import typing
from collections.abc import Sequence

import mypy.nodes

from puya import algo_constants, log, utils
from puya.awst.nodes import (
    BytesAugmentedAssignment,
    BytesBinaryOperation,
    BytesBinaryOperator,
    BytesConstant,
    BytesEncoding,
    BytesUnaryOperation,
    BytesUnaryOperator,
    CallArg,
    Expression,
    IndexExpression,
    IntersectionSliceExpression,
    PuyaLibCall,
    PuyaLibFunction,
    Statement,
)
from puya.errors import CodeError
from puya.parse import SourceLocation
from puyapy.awst_build import intrinsic_factory, pytypes
from puyapy.awst_build.eb import _expect as expect
from puyapy.awst_build.eb._base import FunctionBuilder, InstanceExpressionBuilder
from puyapy.awst_build.eb._utils import (
    compare_bytes,
    dummy_statement,
    dummy_value,
    resolve_negative_literal_index,
)
from puyapy.awst_build.eb.bool import BoolExpressionBuilder
from puyapy.awst_build.eb.interface import (
    BuilderBinaryOp,
    BuilderComparisonOp,
    BuilderUnaryOp,
    InstanceBuilder,
    LiteralBuilder,
    NodeBuilder,
    TypeBuilder,
)
from puyapy.awst_build.eb.uint64 import UInt64ExpressionBuilder

logger = log.get_logger(__name__)


class BytesTypeBuilder(TypeBuilder):
    def __init__(self, location: SourceLocation):
        super().__init__(pytypes.BytesType, location)

    @typing.override
    def try_convert_literal(
        self, literal: LiteralBuilder, location: SourceLocation
    ) -> InstanceBuilder | None:
        match literal.value:
            case bytes(literal_value):
                if len(literal_value) > algo_constants.MAX_BYTES_LENGTH:
                    logger.error(
                        "bytes constant exceeds max length", location=literal.source_location
                    )
                expr = BytesConstant(
                    value=literal_value, encoding=BytesEncoding.unknown, source_location=location
                )
                return BytesExpressionBuilder(expr)
        return None

    @typing.override
    def call(
        self,
        args: Sequence[NodeBuilder],
        arg_kinds: list[mypy.nodes.ArgKind],
        arg_names: list[str | None],
        location: SourceLocation,
    ) -> InstanceBuilder:
        arg = expect.at_most_one_arg(args, location)
        match arg:
            case InstanceBuilder(pytype=pytypes.BytesLiteralType):
                return arg.resolve_literal(BytesTypeBuilder(location))
            case None:
                value = BytesConstant(
                    value=b"", encoding=BytesEncoding.unknown, source_location=location
                )
                return BytesExpressionBuilder(value)
            case other:
                return expect.not_this_type(
                    other, default=expect.default_dummy_value(self.produces())
                )

    @typing.override
    def member_access(self, name: str, location: SourceLocation) -> NodeBuilder:
        """Handle self.name"""
        match name:
            case "from_base32":
                return _FromEncodedStr(location, BytesEncoding.base32)
            case "from_base64":
                return _FromEncodedStr(location, BytesEncoding.base64)
            case "from_hex":
                return _FromEncodedStr(location, BytesEncoding.base16)
            case _:
                return super().member_access(name, location)


class _FromEncodedStr(FunctionBuilder):
    def __init__(
        self,
        location: SourceLocation,
        encoding: typing.Literal[
            BytesEncoding.base16, BytesEncoding.base32, BytesEncoding.base64
        ],
    ):
        super().__init__(location=location)
        self.encoding = encoding

    @typing.override
    def call(
        self,
        args: Sequence[NodeBuilder],
        arg_kinds: list[mypy.nodes.ArgKind],
        arg_names: list[str | None],
        location: SourceLocation,
    ) -> InstanceBuilder:
        arg = expect.exactly_one_arg(args, location, default=expect.default_none)
        if arg is not None:
            encoded_value = expect.simple_string_literal(arg, default=expect.default_none)
            if encoded_value is not None:
                match self.encoding:
                    case BytesEncoding.base64:
                        if not utils.valid_base64(encoded_value):
                            logger.error("invalid base64 value", location=arg.source_location)
                            bytes_value = b""
                        else:
                            bytes_value = base64.b64decode(encoded_value)
                    case BytesEncoding.base32:
                        if not utils.valid_base32(encoded_value):
                            logger.error("invalid base32 value", location=arg.source_location)
                            bytes_value = b""
                        else:
                            bytes_value = base64.b32decode(encoded_value)
                    case BytesEncoding.base16:
                        encoded_value = encoded_value.upper()
                        if not utils.valid_base16(encoded_value):
                            logger.error("invalid base16 value", location=arg.source_location)
                            bytes_value = b""
                        else:
                            bytes_value = base64.b16decode(encoded_value)
                    case _:
                        typing.assert_never(self.encoding)
                expr = BytesConstant(
                    source_location=location,
                    value=bytes_value,
                    encoding=self.encoding,
                )
                return BytesExpressionBuilder(expr)
        return dummy_value(pytypes.BytesType, location)


class BytesExpressionBuilder(InstanceExpressionBuilder[pytypes.RuntimeType]):
    def __init__(self, expr: Expression):
        super().__init__(pytypes.BytesType, expr)

    @typing.override
    def to_bytes(self, location: SourceLocation) -> Expression:
        return self.resolve()

    def length(self, location: SourceLocation) -> InstanceBuilder:
        len_call = intrinsic_factory.bytes_len(expr=self.resolve(), loc=location)
        return UInt64ExpressionBuilder(len_call)

    @typing.override
    def member_access(self, name: str, location: SourceLocation) -> InstanceBuilder:
        match name:
            case "length":
                return self.length(location)
        raise CodeError(f"unrecognised member of {self.pytype}: {name}", location)

    @typing.override
    def index(self, index: InstanceBuilder, location: SourceLocation) -> InstanceBuilder:
        length = self.length(location)
        index = resolve_negative_literal_index(index, length, location)
        expr = IndexExpression(
            base=self.resolve(),
            index=index.resolve(),
            wtype=self.pytype.wtype,
            source_location=location,
        )
        return BytesExpressionBuilder(expr)

    @typing.override
    def slice_index(
        self,
        begin_index: InstanceBuilder | None,
        end_index: InstanceBuilder | None,
        stride: InstanceBuilder | None,
        location: SourceLocation,
    ) -> InstanceBuilder:
        if stride is not None:
            logger.error("stride is not supported", location=stride.source_location)
        slice_expr: Expression = IntersectionSliceExpression(
            base=self.resolve(),
            begin_index=_eval_slice_component(begin_index),
            end_index=_eval_slice_component(end_index),
            wtype=self.pytype.wtype,
            source_location=location,
        )
        return BytesExpressionBuilder(slice_expr)

    @typing.override
    def iterate(self) -> Expression:
        return self.resolve()

    @typing.override
    def iterable_item_type(self) -> pytypes.PyType:
        return pytypes.BytesType

    @typing.override
    def bool_eval(self, location: SourceLocation, *, negate: bool = False) -> InstanceBuilder:
        len_expr = intrinsic_factory.bytes_len(self.resolve(), location)
        len_builder = UInt64ExpressionBuilder(len_expr)
        return len_builder.bool_eval(location, negate=negate)

    @typing.override
    def unary_op(self, op: BuilderUnaryOp, location: SourceLocation) -> InstanceBuilder:
        if op == BuilderUnaryOp.bit_invert:
            return BytesExpressionBuilder(
                BytesUnaryOperation(
                    expr=self.resolve(),
                    op=BytesUnaryOperator.bit_invert,
                    source_location=location,
                )
            )
        return super().unary_op(op, location)

    @typing.override
    def contains(self, item: InstanceBuilder, location: SourceLocation) -> InstanceBuilder:
        item_expr = expect.argument_of_type_else_dummy(
            item, pytypes.BytesType, resolve_literal=True
        ).resolve()
        is_substring_expr = PuyaLibCall(
            func=PuyaLibFunction.is_substring,
            args=[CallArg(value=item_expr, name=None), CallArg(value=self.resolve(), name=None)],
            source_location=location,
        )
        return BoolExpressionBuilder(is_substring_expr)

    @typing.override
    def compare(
        self, other: InstanceBuilder, op: BuilderComparisonOp, location: SourceLocation
    ) -> InstanceBuilder:
        other = other.resolve_literal(converter=BytesTypeBuilder(other.source_location))
        return compare_bytes(self=self, op=op, other=other, source_location=location)

    @typing.override
    def binary_op(
        self,
        other: InstanceBuilder,
        op: BuilderBinaryOp,
        location: SourceLocation,
        *,
        reverse: bool,
    ) -> InstanceBuilder:
        bytes_op = _translate_binary_bytes_operator(op)
        if bytes_op is None:
            return NotImplemented
        other = other.resolve_literal(converter=BytesTypeBuilder(other.source_location))
        if not (pytypes.BytesType <= other.pytype):
            return NotImplemented
        lhs = self.resolve()
        rhs = other.resolve()
        if reverse:
            (lhs, rhs) = (rhs, lhs)
        bin_op_expr = BytesBinaryOperation(
            source_location=location, left=lhs, right=rhs, op=bytes_op
        )
        return BytesExpressionBuilder(bin_op_expr)

    @typing.override
    def augmented_assignment(
        self, op: BuilderBinaryOp, rhs: InstanceBuilder, location: SourceLocation
    ) -> Statement:
        bytes_op = _translate_binary_bytes_operator(op)
        if bytes_op is None:
            logger.error(f"unsupported operator for type: {op.value!r}", location=location)
            return dummy_statement(location)
        rhs = expect.argument_of_type_else_dummy(rhs, self.pytype, resolve_literal=True)
        target = self.resolve_lvalue()
        return BytesAugmentedAssignment(
            target=target,
            op=bytes_op,
            value=rhs.resolve(),
            source_location=location,
        )


def _translate_binary_bytes_operator(operator: BuilderBinaryOp) -> BytesBinaryOperator | None:
    try:
        return BytesBinaryOperator(operator.value)
    except ValueError:
        return None


def _eval_slice_component(val: NodeBuilder | None) -> Expression | None | int:
    match val:
        case None:
            return None
        case LiteralBuilder(value=int(int_value)):
            return int_value
    return expect.argument_of_type_else_dummy(val, pytypes.UInt64Type).resolve()
algorandfoundation/puya
src/puyapy/awst_build/eb/bytes.py
Python
NOASSERTION
11,433
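# --- Illustrative sketch (not part of the dataset row above) ---
# bytes.py's Bytes.from_base64/from_base32/from_hex validate a string literal and
# then decode it at compile time with the stdlib. A standalone analogue of the
# decoding step (base16 is upper-cased before decoding, as in _FromEncodedStr):
import base64

assert base64.b64decode("aGVsbG8=") == b"hello"
assert base64.b32decode("NBSWY3DP") == b"hello"
assert base64.b16decode("68656c6c6f".upper()) == b"hello"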
import typing
from collections.abc import Mapping, Sequence

import mypy.nodes

from puya.awst.nodes import CompiledContract, CompiledLogicSig, Expression
from puya.awst.txn_fields import TxnField
from puya.log import get_logger
from puya.parse import SourceLocation
from puya.program_refs import LogicSigReference
from puyapy.awst_build import pytypes
from puyapy.awst_build.eb import _expect as expect
from puyapy.awst_build.eb._base import FunctionBuilder
from puyapy.awst_build.eb._utils import dummy_value
from puyapy.awst_build.eb.dict_ import DictLiteralBuilder
from puyapy.awst_build.eb.interface import InstanceBuilder, NodeBuilder
from puyapy.awst_build.eb.logicsig import LogicSigExpressionBuilder
from puyapy.awst_build.eb.tuple import TupleExpressionBuilder
from puyapy.awst_build.utils import get_arg_mapping

logger = get_logger(__name__)

# these names should match pytypes CompiledContract definition
PROGRAM_FIELDS = {
    "approval_program": TxnField.ApprovalProgramPages,
    "clear_state_program": TxnField.ClearStateProgramPages,
}
APP_ALLOCATION_FIELDS = {
    "extra_program_pages": TxnField.ExtraProgramPages,
    "global_bytes": TxnField.GlobalNumByteSlice,
    "global_uints": TxnField.GlobalNumUint,
    "local_bytes": TxnField.LocalNumByteSlice,
    "local_uints": TxnField.LocalNumUint,
}


class CompiledContractExpressionBuilder(TupleExpressionBuilder):
    def __init__(self, expr: Expression) -> None:
        super().__init__(expr, pytypes.CompiledContractType)


class CompiledLogicSigExpressionBuilder(TupleExpressionBuilder):
    def __init__(self, expr: Expression) -> None:
        super().__init__(expr, pytypes.CompiledLogicSigType)


_TEMPLATE_VAR_KWARG_NAMES = [
    "template_vars",
    "template_vars_prefix",
]


class CompileContractFunctionBuilder(FunctionBuilder):
    @typing.override
    def call(
        self,
        args: Sequence[NodeBuilder],
        arg_kinds: list[mypy.nodes.ArgKind],
        arg_names: list[str | None],
        location: SourceLocation,
    ) -> InstanceBuilder:
        contract_arg_name = "contract"
        arg_map, _ = get_arg_mapping(
            args=args,
            arg_names=arg_names,
            call_location=location,
            raise_on_missing=False,
            required_positional_names=[contract_arg_name],
            optional_kw_only=[
                *APP_ALLOCATION_FIELDS,
                *_TEMPLATE_VAR_KWARG_NAMES,
            ],
        )
        prefix, template_vars = _extract_prefix_template_args(arg_map)
        allocation_overrides = {}
        for python_name, field in APP_ALLOCATION_FIELDS.items():
            if arg := arg_map.get(python_name):
                allocation_overrides[field] = expect.argument_of_type_else_dummy(
                    arg, pytypes.UInt64Type, resolve_literal=True
                ).resolve()
        result_type = pytypes.CompiledContractType
        match arg_map[contract_arg_name]:
            case NodeBuilder(pytype=pytypes.TypeType(typ=pytypes.ContractType() as contract_typ)):
                contract = contract_typ.name
            case invalid_or_none:
                if invalid_or_none is None:
                    # if None (=missing), then error message already logged by get_arg_mapping
                    return dummy_value(result_type, location)
                return expect.not_this_type(
                    invalid_or_none, default=expect.default_dummy_value(result_type)
                )
        return CompiledContractExpressionBuilder(
            CompiledContract(
                contract=contract,
                allocation_overrides=allocation_overrides,
                prefix=prefix,
                template_variables=template_vars,
                wtype=result_type.wtype,
                source_location=location,
            )
        )


class CompileLogicSigFunctionBuilder(FunctionBuilder):
    @typing.override
    def call(
        self,
        args: Sequence[NodeBuilder],
        arg_kinds: list[mypy.nodes.ArgKind],
        arg_names: list[str | None],
        location: SourceLocation,
    ) -> InstanceBuilder:
        logicsig_arg_name = "logicsig"
        arg_map, _ = get_arg_mapping(
            args=args,
            arg_names=arg_names,
            call_location=location,
            raise_on_missing=False,
            required_positional_names=[logicsig_arg_name],
            optional_kw_only=_TEMPLATE_VAR_KWARG_NAMES,
        )
        match arg_map.get(logicsig_arg_name):
            case LogicSigExpressionBuilder(ref=logic_sig):
                pass
            case missing_or_invalid:
                logic_sig = LogicSigReference("")  # dummy reference
                # if None (=missing), then error message already logged by get_arg_mapping
                if missing_or_invalid is not None:
                    expect.not_this_type(missing_or_invalid, default=expect.default_none)
        prefix, template_vars = _extract_prefix_template_args(arg_map)
        return CompiledLogicSigExpressionBuilder(
            CompiledLogicSig(
                logic_sig=logic_sig,
                prefix=prefix,
                template_variables=template_vars,
                wtype=pytypes.CompiledLogicSigType.wtype,
                source_location=location,
            )
        )


def _extract_prefix_template_args(
    name_args: dict[str, NodeBuilder],
) -> tuple[str | None, Mapping[str, Expression]]:
    prefix: str | None = None
    template_vars: Mapping[str, Expression] = {}
    if template_vars_node := name_args.get("template_vars"):
        if isinstance(template_vars_node, DictLiteralBuilder):
            template_vars = {k: v.resolve() for k, v in template_vars_node.mapping.items()}
        else:
            expect.not_this_type(template_vars_node, default=expect.default_none)
    if prefix_node := name_args.get("template_vars_prefix"):
        prefix = expect.simple_string_literal(prefix_node, default=expect.default_none)
    return prefix, template_vars
algorandfoundation/puya
src/puyapy/awst_build/eb/compiled.py
Python
NOASSERTION
5,997
import typing

from puya.awst.nodes import (
    BinaryBooleanOperator,
    ConditionalExpression,
    Expression,
    Lvalue,
    Statement,
)
from puya.errors import CodeError
from puya.parse import SourceLocation
from puyapy.awst_build import pytypes
from puyapy.awst_build.eb.bool import BoolExpressionBuilder
from puyapy.awst_build.eb.factories import builder_for_instance
from puyapy.awst_build.eb.interface import (
    BuilderBinaryOp,
    BuilderComparisonOp,
    BuilderUnaryOp,
    InstanceBuilder,
    LiteralBuilder,
    NodeBuilder,
    TypeBuilder,
)


class ConditionalLiteralBuilder(InstanceBuilder):
    def __init__(
        self,
        *,
        true_literal: LiteralBuilder,
        false_literal: LiteralBuilder,
        condition: InstanceBuilder,
        location: SourceLocation,
    ):
        super().__init__(location)
        self._pytype = _common_base(true_literal.pytype, false_literal.pytype, location)
        self._true_literal = true_literal
        self._false_literal = false_literal
        self._condition = condition

    @typing.override
    @property
    def pytype(self) -> pytypes.PyType:
        # return the computed common base of both branches (the original returned
        # self._true_literal.pytype, leaving self._pytype unused)
        return self._pytype

    @typing.override
    def resolve(self) -> Expression:
        true_expr = self._true_literal.resolve()
        false_expr = self._false_literal.resolve()
        condition_expr = self._condition.resolve()
        return ConditionalExpression(
            condition=condition_expr,
            true_expr=true_expr,
            false_expr=false_expr,
            source_location=self.source_location,
        )

    @typing.override
    def resolve_literal(self, converter: TypeBuilder) -> InstanceBuilder:
        true_b = converter.convert_literal(
            literal=self._true_literal, location=converter.source_location
        )
        false_b = converter.convert_literal(
            literal=self._false_literal, location=converter.source_location
        )
        return self._resolve_literals(true_b, false_b)

    @typing.override
    def try_resolve_literal(self, converter: TypeBuilder) -> InstanceBuilder | None:
        true_b = converter.try_convert_literal(
            literal=self._true_literal, location=converter.source_location
        )
        false_b = converter.try_convert_literal(
            literal=self._false_literal, location=converter.source_location
        )
        if true_b is None or false_b is None:
            return None
        return self._resolve_literals(true_b, false_b)

    def _resolve_literals(
        self, true_b: InstanceBuilder, false_b: InstanceBuilder
    ) -> InstanceBuilder:
        # use the common base of both converted branches (the original immediately
        # overwrote this with true_b.pytype, discarding the computed base)
        result_pytype = _common_base(true_b.pytype, false_b.pytype, self.source_location)
        true_expr = true_b.resolve()
        false_expr = false_b.resolve()
        condition_expr = self._condition.resolve()
        result_expr = ConditionalExpression(
            condition=condition_expr,
            true_expr=true_expr,
            false_expr=false_expr,
            source_location=self.source_location,
        )
        return builder_for_instance(result_pytype, result_expr)

    @typing.override
    def resolve_lvalue(self) -> Lvalue:
        raise CodeError("cannot assign to literal", self.source_location)

    @typing.override
    def delete(self, location: SourceLocation) -> Statement:
        raise CodeError("cannot delete literal", location)

    @typing.override
    def unary_op(self, op: BuilderUnaryOp, location: SourceLocation) -> InstanceBuilder:
        transformed_true = self._true_literal.unary_op(op, location)
        transformed_false = self._false_literal.unary_op(op, location)
        return ConditionalLiteralBuilder(
            true_literal=transformed_true,
            false_literal=transformed_false,
            condition=self._condition,
            location=location,
        )

    @typing.override
    def compare(
        self, other: InstanceBuilder, op: BuilderComparisonOp, location: SourceLocation
    ) -> InstanceBuilder:
        other = other.single_eval()
        transformed_true = self._true_literal.compare(other, op, location)
        transformed_false = self._false_literal.compare(other, op, location)
        if transformed_true is NotImplemented or transformed_false is NotImplemented:
            return NotImplemented
        return ConditionalLiteralBuilder(
            true_literal=transformed_true,
            false_literal=transformed_false,
            condition=self._condition,
            location=location,
        )

    @typing.override
    def binary_op(
        self,
        other: InstanceBuilder,
        op: BuilderBinaryOp,
        location: SourceLocation,
        *,
        reverse: bool,
    ) -> InstanceBuilder:
        other = other.single_eval()
        transformed_true = self._true_literal.binary_op(other, op, location, reverse=reverse)
        transformed_false = self._false_literal.binary_op(other, op, location, reverse=reverse)
        if transformed_true is NotImplemented or transformed_false is NotImplemented:
            return NotImplemented
        return ConditionalLiteralBuilder(
            true_literal=transformed_true,
            false_literal=transformed_false,
            condition=self._condition,
            location=location,
        )

    @typing.override
    def bool_binary_op(
        self, other: InstanceBuilder, op: BinaryBooleanOperator, location: SourceLocation
    ) -> InstanceBuilder:
        return super().bool_binary_op(other, op, location)

    @typing.override
    def augmented_assignment(
        self, op: BuilderBinaryOp, rhs: InstanceBuilder, location: SourceLocation
    ) -> Statement:
        raise CodeError("cannot assign to literal", location)

    @typing.override
    def to_bytes(self, location: SourceLocation) -> Expression:
        true_expr = self._true_literal.to_bytes(location)
        false_expr = self._false_literal.to_bytes(location)
        condition_expr = self._condition.resolve()
        return ConditionalExpression(
            condition=condition_expr,
            true_expr=true_expr,
            false_expr=false_expr,
            source_location=self.source_location,
        )

    @typing.override
    def member_access(self, name: str, location: SourceLocation) -> NodeBuilder:
        transformed_true = self._true_literal.member_access(name, location)
        transformed_false = self._false_literal.member_access(name, location)
        return ConditionalLiteralBuilder(
            true_literal=transformed_true,
            false_literal=transformed_false,
            condition=self._condition,
            location=location,
        )

    @typing.override
    def bool_eval(self, location: SourceLocation, *, negate: bool = False) -> InstanceBuilder:
        true_expr = self._true_literal.bool_eval(location, negate=negate).resolve()
        false_expr = self._false_literal.bool_eval(location, negate=negate).resolve()
        condition_expr = self._condition.resolve()
        return BoolExpressionBuilder(
            ConditionalExpression(
                condition=condition_expr,
                true_expr=true_expr,
                false_expr=false_expr,
                source_location=location,
            )
        )

    @typing.override
    def contains(self, item: InstanceBuilder, location: SourceLocation) -> InstanceBuilder:
        item = item.single_eval()
        transformed_true = self._true_literal.contains(item, location)
        transformed_false = self._false_literal.contains(item, location)
        return ConditionalLiteralBuilder(
            true_literal=transformed_true,
            false_literal=transformed_false,
            condition=self._condition,
            location=location,
        )

    @typing.override
    def iterate(self) -> typing.Never:
        raise CodeError("cannot iterate literal")

    @typing.override
    def iterable_item_type(self) -> typing.Never:
        self.iterate()

    @typing.override
    def index(self, index: InstanceBuilder, location: SourceLocation) -> InstanceBuilder:
        index = index.single_eval()
        transformed_true = self._true_literal.index(index, location)
        transformed_false = self._false_literal.index(index, location)
        return ConditionalLiteralBuilder(
            true_literal=transformed_true,
            false_literal=transformed_false,
            condition=self._condition,
            location=location,
        )

    @typing.override
    def slice_index(
        self,
        begin_index: InstanceBuilder | None,
        end_index: InstanceBuilder | None,
        stride: InstanceBuilder | None,
        location: SourceLocation,
    ) -> InstanceBuilder:
        if begin_index is not None:
            begin_index = begin_index.single_eval()
        if end_index is not None:
            end_index = end_index.single_eval()
        if stride is not None:
            stride = stride.single_eval()
        transformed_true = self._true_literal.slice_index(
            begin_index=begin_index, end_index=end_index, stride=stride, location=location
        )
        transformed_false = self._false_literal.slice_index(
            begin_index=begin_index, end_index=end_index, stride=stride, location=location
        )
        return ConditionalLiteralBuilder(
            true_literal=transformed_true,
            false_literal=transformed_false,
            condition=self._condition,
            location=location,
        )

    @typing.override
    def single_eval(self) -> InstanceBuilder:
        condition = self._condition.single_eval()
        return ConditionalLiteralBuilder(
            true_literal=self._true_literal,
            false_literal=self._false_literal,
            condition=condition,
            location=self.source_location,
        )


def _common_base(
    a: pytypes.PyType, b: pytypes.PyType, location: SourceLocation
) -> pytypes.PyType:
    if a <= b:
        return a
    elif b < a:
        return b
    else:
        raise CodeError("type mismatch", location)
algorandfoundation/puya
src/puyapy/awst_build/eb/conditional_literal.py
Python
NOASSERTION
10,077
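# --- Illustrative sketch (not part of the dataset row above) ---
# ConditionalLiteralBuilder defers `cond ? lit_a : lit_b` by applying each operation
# to both literal branches and re-wrapping the same condition, only materialising a
# single conditional expression at resolve() time. A plain-Python analogue of that
# distribution pattern, under assumed names (CondLit is an illustration, not a real class):
from dataclasses import dataclass
from typing import Callable

@dataclass(frozen=True)
class CondLit:
    condition: bool
    true_val: int
    false_val: int

    def map(self, fn: Callable[[int], int]) -> "CondLit":
        # distribute fn over both branches; the condition is untouched
        return CondLit(self.condition, fn(self.true_val), fn(self.false_val))

    def resolve(self) -> int:
        # only now is the conditional actually taken
        return self.true_val if self.condition else self.false_val

assert CondLit(True, 2, 3).map(lambda v: v * 10).resolve() == 20
assert CondLit(False, 2, 3).map(lambda v: v * 10).resolve() == 30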